From ea0402cb323cbe4fe9996e1855497fc68fc911ce Mon Sep 17 00:00:00 2001 From: speakeasybot Date: Mon, 8 Jul 2024 17:41:06 +0000 Subject: [PATCH] ci: regenerated with OpenAPI Doc , Speakeasy CLI 1.327.0 --- .gitignore | 1 + .speakeasy/gen.lock | 47 +- .speakeasy/gen.yaml | 6 +- .speakeasy/workflow.lock | 10 +- README.md | 198 +- RELEASES.md | 12 +- USAGE.md | 86 +- docs/models/components/domainschema.md | 2 +- docs/models/components/linkschema.md | 36 +- docs/models/components/workspaceschema.md | 8 +- .../operations/createdomainrequestbody.md | 4 +- .../operations/createlinkrequestbody.md | 24 +- docs/models/operations/data.md | 22 +- .../operations/getmetatagsresponsebody.md | 6 +- docs/models/operations/requestbody.md | 24 +- .../operations/trackcustomerresponsebody.md | 6 +- .../models/operations/trackleadrequestbody.md | 6 +- .../operations/trackleadresponsebody.md | 6 +- .../models/operations/tracksalerequestbody.md | 2 +- .../operations/tracksaleresponsebody.md | 2 +- .../operations/updatedomainrequestbody.md | 4 +- .../operations/updatelinkrequestbody.md | 26 +- docs/models/operations/updatetagrequest.md | 2 +- .../operations/updateworkspacerequest.md | 7 +- .../operations/updateworkspacerequestbody.md | 9 + .../operations/upsertlinkrequestbody.md | 24 +- docs/sdks/analytics/README.md | 21 +- docs/sdks/domains/README.md | 53 +- docs/sdks/links/README.md | 124 +- docs/sdks/metatags/README.md | 11 +- docs/sdks/qrcodes/README.md | 11 +- docs/sdks/tags/README.md | 36 +- docs/sdks/track/README.md | 42 +- docs/sdks/workspaces/README.md | 27 +- poetry.lock | 461 +++ pylintrc | 43 +- pyproject.toml | 29 + scripts/publish.sh | 8 +- setup.py | 55 - src/dub/_hooks/sdkhooks.py | 12 +- src/dub/_hooks/types.py | 12 +- src/dub/analytics.py | 282 +- src/dub/basesdk.py | 208 ++ src/dub/domains.py | 1123 ++++---- src/dub/httpclient.py | 78 + src/dub/links.py | 2504 +++++++++-------- src/dub/metatags.py | 147 +- src/dub/models/components/clicksbrowsers.py | 19 +- src/dub/models/components/clickscities.py | 523 ++-- src/dub/models/components/clickscount.py | 15 +- src/dub/models/components/clickscountries.py | 519 ++-- src/dub/models/components/clicksdevices.py | 19 +- src/dub/models/components/clicksos.py | 19 +- src/dub/models/components/clicksreferers.py | 19 +- src/dub/models/components/clickstimeseries.py | 19 +- src/dub/models/components/clickstoplinks.py | 50 +- src/dub/models/components/clickstopurls.py | 19 +- src/dub/models/components/countrycode.py | 501 ++-- src/dub/models/components/domainschema.py | 76 +- src/dub/models/components/leadsbrowsers.py | 19 +- src/dub/models/components/leadscities.py | 523 ++-- src/dub/models/components/leadscount.py | 15 +- src/dub/models/components/leadscountries.py | 519 ++-- src/dub/models/components/leadsdevices.py | 19 +- src/dub/models/components/leadsos.py | 19 +- src/dub/models/components/leadsreferers.py | 19 +- src/dub/models/components/leadstimeseries.py | 19 +- src/dub/models/components/leadstoplinks.py | 50 +- src/dub/models/components/leadstopurls.py | 19 +- src/dub/models/components/linkgeotargeting.py | 767 +++-- src/dub/models/components/linkschema.py | 966 +++++-- src/dub/models/components/salesbrowsers.py | 23 +- src/dub/models/components/salescities.py | 527 ++-- src/dub/models/components/salescount.py | 19 +- src/dub/models/components/salescountries.py | 523 ++-- src/dub/models/components/salesdevices.py | 23 +- src/dub/models/components/salesos.py | 23 +- src/dub/models/components/salesreferers.py | 23 +- 
src/dub/models/components/salestimeseries.py | 23 +- src/dub/models/components/salestoplinks.py | 54 +- src/dub/models/components/salestopurls.py | 23 +- src/dub/models/components/security.py | 14 +- src/dub/models/components/tagschema.py | 37 +- src/dub/models/components/workspaceschema.py | 163 +- src/dub/models/errors/badrequest.py | 43 +- src/dub/models/errors/conflict.py | 43 +- src/dub/models/errors/forbidden.py | 43 +- src/dub/models/errors/internalservererror.py | 43 +- src/dub/models/errors/inviteexpired.py | 43 +- src/dub/models/errors/notfound.py | 43 +- src/dub/models/errors/ratelimitexceeded.py | 45 +- src/dub/models/errors/sdkerror.py | 24 +- src/dub/models/errors/unauthorized.py | 45 +- src/dub/models/errors/unprocessableentity.py | 43 +- src/dub/models/internal/globals.py | 20 +- src/dub/models/operations/__init__.py | 2 +- src/dub/models/operations/bulkcreatelinks.py | 165 +- src/dub/models/operations/bulkupdatelinks.py | 153 +- src/dub/models/operations/createdomain.py | 66 +- src/dub/models/operations/createlink.py | 165 +- src/dub/models/operations/createtag.py | 60 +- src/dub/models/operations/deletedomain.py | 42 +- src/dub/models/operations/deletelink.py | 42 +- src/dub/models/operations/getlinkinfo.py | 37 +- src/dub/models/operations/getlinks.py | 80 +- src/dub/models/operations/getlinkscount.py | 80 +- src/dub/models/operations/getmetatags.py | 63 +- src/dub/models/operations/getqrcode.py | 44 +- src/dub/models/operations/gettags.py | 23 +- src/dub/models/operations/getworkspace.py | 15 +- src/dub/models/operations/listdomains.py | 23 +- .../models/operations/retrieveanalytics.py | 192 +- src/dub/models/operations/trackcustomer.py | 87 +- src/dub/models/operations/tracklead.py | 132 +- src/dub/models/operations/tracksale.py | 140 +- src/dub/models/operations/updatedomain.py | 77 +- src/dub/models/operations/updatelink.py | 176 +- src/dub/models/operations/updatetag.py | 75 +- src/dub/models/operations/updateworkspace.py | 29 +- src/dub/models/operations/upsertlink.py | 165 +- src/dub/qr_codes.py | 271 +- src/dub/sdk.py | 104 +- src/dub/sdkconfiguration.py | 31 +- src/dub/tags.py | 854 +++--- src/dub/track.py | 855 +++--- src/dub/types/__init__.py | 9 + src/dub/types/basemodel.py | 18 + src/dub/utils/__init__.py | 73 +- src/dub/utils/enums.py | 34 + src/dub/utils/eventstreaming.py | 122 + src/dub/utils/forms.py | 207 ++ src/dub/utils/headers.py | 136 + src/dub/utils/metadata.py | 118 + src/dub/utils/queryparams.py | 162 ++ src/dub/utils/requestbodies.py | 66 + src/dub/utils/retries.py | 127 +- src/dub/utils/security.py | 166 ++ src/dub/utils/serializers.py | 159 ++ src/dub/utils/url.py | 152 + src/dub/utils/utils.py | 1099 -------- src/dub/utils/values.py | 110 + src/dub/workspaces.py | 536 ++-- 142 files changed, 11999 insertions(+), 8072 deletions(-) create mode 100644 docs/models/operations/updateworkspacerequestbody.md create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 setup.py create mode 100644 src/dub/basesdk.py create mode 100644 src/dub/httpclient.py create mode 100644 src/dub/types/__init__.py create mode 100644 src/dub/types/basemodel.py create mode 100644 src/dub/utils/enums.py create mode 100644 src/dub/utils/eventstreaming.py create mode 100644 src/dub/utils/forms.py create mode 100644 src/dub/utils/headers.py create mode 100644 src/dub/utils/metadata.py create mode 100644 src/dub/utils/queryparams.py create mode 100644 src/dub/utils/requestbodies.py create mode 100644 src/dub/utils/security.py create mode 100644 
src/dub/utils/serializers.py create mode 100644 src/dub/utils/url.py delete mode 100644 src/dub/utils/utils.py create mode 100644 src/dub/utils/values.py diff --git a/.gitignore b/.gitignore index 3cdf338..171bf54 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +pyrightconfig.json venv/ src/*.egg-info/ __pycache__/ diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 92f860a..9c988d5 100755 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,34 +1,34 @@ lockVersion: 2.0.0 id: ab657d60-207e-43a4-8cc8-692a672950ed management: - docChecksum: bdbe0b838757272c0b9e043ea9c5cb5f + docChecksum: 095ac23ad79b88e598171eb4620de038 docVersion: 0.0.1 - speakeasyVersion: 1.326.3 + speakeasyVersion: 1.327.0 generationVersion: 2.359.6 - releaseVersion: 0.0.18 - configChecksum: e2de8442dab8baedd89ab6b339933cc8 + releaseVersion: 0.0.19 + configChecksum: e11e50b5bc5293e2ddb0174dc56c61ee repoURL: https://github.com/dubinc/dub-python.git installationURL: https://github.com/dubinc/dub-python.git published: true features: python: additionalDependencies: 0.1.0 - constsAndDefaults: 0.1.3 - core: 4.6.13 - deprecations: 2.81.1 - examples: 2.81.3 - flattening: 2.81.1 - globalSecurity: 2.83.5 + constsAndDefaults: 0.1.0 + core: 0.2.9 + deprecations: 0.1.1 + examples: 0.1.0 + flattening: 0.1.0 + globalSecurity: 0.1.0 globalSecurityCallbacks: 0.1.0 globalSecurityFlattening: 0.1.0 - globalServerURLs: 2.82.2 - globals: 2.82.1 + globalServerURLs: 0.1.0 + globals: 0.1.0 hiddenGlobals: 0.1.0 - nameOverrides: 2.81.2 + nameOverrides: 0.1.0 nullables: 0.1.0 responseFormat: 0.1.0 sdkHooks: 0.1.0 - unions: 2.82.8 + unions: 0.1.0 generatedFiles: - src/dub/sdkconfiguration.py - src/dub/links.py @@ -42,12 +42,26 @@ generatedFiles: - src/dub/sdk.py - py.typed - pylintrc + - pyproject.toml - scripts/publish.sh - - setup.py - src/dub/__init__.py + - src/dub/basesdk.py + - src/dub/httpclient.py + - src/dub/types/__init__.py + - src/dub/types/basemodel.py - src/dub/utils/__init__.py + - src/dub/utils/enums.py + - src/dub/utils/eventstreaming.py + - src/dub/utils/forms.py + - src/dub/utils/headers.py + - src/dub/utils/metadata.py + - src/dub/utils/queryparams.py + - src/dub/utils/requestbodies.py - src/dub/utils/retries.py - - src/dub/utils/utils.py + - src/dub/utils/security.py + - src/dub/utils/serializers.py + - src/dub/utils/url.py + - src/dub/utils/values.py - src/dub/models/errors/sdkerror.py - src/dub/models/components/linkschema.py - src/dub/models/components/tagschema.py @@ -248,6 +262,7 @@ generatedFiles: - docs/models/operations/retrieveanalyticsrequest.md - docs/models/operations/retrieveanalyticsresponsebody.md - docs/models/operations/getworkspacerequest.md + - docs/models/operations/updateworkspacerequestbody.md - docs/models/operations/updateworkspacerequest.md - docs/models/operations/gettagsglobals.md - docs/models/operations/gettagsrequest.md diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml index 919f0bc..176db1a 100755 --- a/.speakeasy/gen.yaml +++ b/.speakeasy/gen.yaml @@ -12,11 +12,13 @@ generation: auth: oAuth2ClientCredentialsEnabled: true python: - version: 0.0.18 + version: 0.0.19 additionalDependencies: - main: {} dev: {} + main: {} author: Speakeasy + authors: + - Speakeasy clientServerStatusCodesAsErrors: true description: Python Client SDK Generated by Speakeasy flattenGlobalSecurity: true diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index ec55f85..49ed7da 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -1,9 +1,9 @@ -speakeasyVersion: 
1.326.3 +speakeasyVersion: 1.327.0 sources: dub: sourceNamespace: dub - sourceRevisionDigest: sha256:c77d4b572795047f21855982649899215d0ade7ee13f88dce207827418da3370 - sourceBlobDigest: sha256:003f8bfdee407b4771d198713e61c4025f32cff08a9b4a0ad136e3c45956b6f8 + sourceRevisionDigest: sha256:7cb5a6f77af859e4b583608c7cf88cc81a3408a724f961ff43d326d9f52f570e + sourceBlobDigest: sha256:0aa314984bceec52b66d91c35ce4871b625b28b397e2ef1a54ca1a496b776201 tags: - latest - main @@ -11,8 +11,8 @@ targets: my-first-target: source: dub sourceNamespace: dub - sourceRevisionDigest: sha256:c77d4b572795047f21855982649899215d0ade7ee13f88dce207827418da3370 - sourceBlobDigest: sha256:003f8bfdee407b4771d198713e61c4025f32cff08a9b4a0ad136e3c45956b6f8 + sourceRevisionDigest: sha256:7cb5a6f77af859e4b583608c7cf88cc81a3408a724f961ff43d326d9f52f570e + sourceBlobDigest: sha256:0aa314984bceec52b66d91c35ce4871b625b28b397e2ef1a54ca1a496b776201 outLocation: /github/workspace/repo workflow: workflowVersion: 1.0.0 diff --git a/README.md b/README.md index 9560db0..ef7cc47 100644 --- a/README.md +++ b/README.md @@ -14,9 +14,15 @@ Learn more about the Dub.co Python SDK in the [official documentation](https://d ## SDK Installation +PIP ```bash pip install dub ``` + +Poetry +```bash +poetry add dub +``` @@ -25,51 +31,101 @@ pip install dub ### Example 1 ```python -import dub -from dub.models import operations +# Synchronous Example +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.create(request=operations.CreateLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.create(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response pass +``` + +
+The same SDK client can also be used to make asynchronous requests by importing asyncio. ```python +# Asynchronous Example +import asyncio +from dub import Dub + +async def main(): + s = Dub( + token="DUB_API_KEY", + ) + res = await s.links.create_async(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", + ], + }) + if res is not None: + # handle response + pass + +asyncio.run(main()) ``` ### Example 2 ```python -import dub -from dub.models import operations +# Synchronous Example +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.upsert(request=operations.UpsertLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.upsert(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response pass +``` + +<
+The same SDK client can also be used to make asynchronous requests by importing asyncio. ```python +# Asynchronous Example +import asyncio +from dub import Dub + +async def main(): + s = Dub( + token="DUB_API_KEY", + ) + res = await s.links.upsert_async(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", + ], + }) + if res is not None: + # handle response + pass + +asyncio.run(main()) ``` @@ -146,16 +202,16 @@ Handling errors in this SDK should largely match your expectations. All operati ### Example ```python -import dub -from dub.models import errors, operations +from dub import Dub +from dub.models import errors -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) res = None try: - res = s.links.list(request=operations.GetLinksRequest()) + res = s.links.list(request={}) except errors.BadRequest as e: # handle exception @@ -209,16 +265,15 @@ You can override the default server globally by passing a server index to the `s #### Example ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( server_idx=0, token="DUB_API_KEY", ) -res = s.links.list(request=operations.GetLinksRequest()) +res = s.links.list(request={}) if res is not None: # handle response @@ -231,16 +286,15 @@ The default server can also be overridden globally by passing a URL to the `server_url: str` optional parameter when initializing the SDK client instance. For example: ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( server_url="https://api.dub.co", token="DUB_API_KEY", ) -res = s.links.list(request=operations.GetLinksRequest()) +res = s.links.list(request={}) if res is not None: # handle response @@ -252,16 +306,81 @@ ## Custom HTTP Client -The Python SDK makes API calls using the [requests](https://pypi.org/project/requests/) HTTP library. In order to provide a convenient way to configure timeouts, cookies, proxies, custom headers, and other low-level configuration, you can initialize the SDK client with a custom `requests.Session` object. +The Python SDK makes API calls using the [httpx](https://www.python-httpx.org/) HTTP library. In order to provide a convenient way to configure timeouts, cookies, proxies, custom headers, and other low-level configuration, you can initialize the SDK client with your own HTTP client instance. +Depending on whether you are using the sync or async version of the SDK, you can pass an instance of `HttpClient` or `AsyncHttpClient` respectively, which are Protocols ensuring that the client has the necessary methods to make API calls. +This allows you to wrap the client with your own custom logic, such as adding custom headers, logging, or error handling, or you can just pass an instance of `httpx.Client` or `httpx.AsyncClient` directly. 
For example, you could specify a header for every request that this SDK makes as follows: ```python -import dub -import requests +from dub import Dub +import httpx -http_client = requests.Session() -http_client.headers.update({'x-custom-header': 'someValue'}) -s = dub.Dub(client=http_client) +http_client = httpx.Client(headers={"x-custom-header": "someValue"}) +s = Dub(client=http_client) +``` + +or you could wrap the client with your own custom logic: +```python +from dub import Dub +from dub.httpclient import AsyncHttpClient +from typing import Any, Optional, Union +import httpx + +class CustomClient(AsyncHttpClient): + client: AsyncHttpClient + + def __init__(self, client: AsyncHttpClient): + self.client = client + + async def send( + self, + request: httpx.Request, + *, + stream: bool = False, + auth: Union[ + httpx._types.AuthTypes, httpx._client.UseClientDefault, None + ] = httpx.USE_CLIENT_DEFAULT, + follow_redirects: Union[ + bool, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + ) -> httpx.Response: + request.headers["Client-Level-Header"] = "added by client" + + return await self.client.send( + request, stream=stream, auth=auth, follow_redirects=follow_redirects + ) + + def build_request( + self, + method: str, + url: httpx._types.URLTypes, + *, + content: Optional[httpx._types.RequestContent] = None, + data: Optional[httpx._types.RequestData] = None, + files: Optional[httpx._types.RequestFiles] = None, + json: Optional[Any] = None, + params: Optional[httpx._types.QueryParamTypes] = None, + headers: Optional[httpx._types.HeaderTypes] = None, + cookies: Optional[httpx._types.CookieTypes] = None, + timeout: Union[ + httpx._types.TimeoutTypes, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + extensions: Optional[httpx._types.RequestExtensions] = None, + ) -> httpx.Request: + return self.client.build_request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + +s = Dub(async_client=CustomClient(httpx.AsyncClient())) ``` @@ -278,15 +397,14 @@ This SDK supports the following security scheme globally: To authenticate with the API the `token` parameter must be set when initializing the SDK client instance. For example: ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.list(request=operations.GetLinksRequest()) +res = s.links.list(request={}) if res is not None: # handle response diff --git a/RELEASES.md index dbf6163..75b5bc6 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -166,4 +166,14 @@ Based on: ### Generated - [python v0.0.18] . ### Releases -- [PyPI v0.0.18] https://pypi.org/project/dub/0.0.18 - . \ No newline at end of file +- [PyPI v0.0.18] https://pypi.org/project/dub/0.0.18 - . + +## 2024-07-08 17:40:18 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.327.0 (2.359.6) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.0.19] . +### Releases +- [PyPI v0.0.19] https://pypi.org/project/dub/0.0.19 - . 
\ No newline at end of file diff --git a/USAGE.md b/USAGE.md index 53776d7..ea61dd8 100644 --- a/USAGE.md +++ b/USAGE.md @@ -1,47 +1,97 @@ ```python -import dub -from dub.models import operations +# Synchronous Example +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.create(request=operations.CreateLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.create(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response pass +``` + +
+ +The same SDK client can also be used to make asynchronous requests by importing asyncio. ```python +# Asynchronous Example +import asyncio +from dub import Dub + +async def main(): + s = Dub( + token="DUB_API_KEY", + ) + res = await s.links.create_async(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", + ], + }) + if res is not None: + # handle response + pass +asyncio.run(main()) ``` ```python -import dub -from dub.models import operations +# Synchronous Example +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.upsert(request=operations.UpsertLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.upsert(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response pass +``` + +<
+ +The same SDK client can also be used to make asychronous requests by importing asyncio. +```python +# Asynchronous Example +import asyncio +from dub import Dub + +async def main(): + s = Dub( + token="DUB_API_KEY", + ) + res = await s.links.upsert_async(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", + ], + }) + if res is not None: + # handle response + pass +asyncio.run(main()) ``` \ No newline at end of file diff --git a/docs/models/components/domainschema.md b/docs/models/components/domainschema.md index f80c557..6749ae5 100644 --- a/docs/models/components/domainschema.md +++ b/docs/models/components/domainschema.md @@ -7,7 +7,7 @@ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | | `id` | *str* | :heavy_check_mark: | The unique identifier of the domain. | | | `slug` | *str* | :heavy_check_mark: | The domain name. | acme.com | -| `expired_url` | *Optional[str]* | :heavy_check_mark: | The URL to redirect to when a link under this domain has expired. | https://acme.com/expired | +| `expired_url` | *Nullable[str]* | :heavy_check_mark: | The URL to redirect to when a link under this domain has expired. | https://acme.com/expired | | `created_at` | *str* | :heavy_check_mark: | The date the domain was created. | | | `updated_at` | *str* | :heavy_check_mark: | The date the domain was last updated. | | | `verified` | *Optional[bool]* | :heavy_minus_sign: | Whether the domain is verified. | | diff --git a/docs/models/components/linkschema.md b/docs/models/components/linkschema.md index 8627590..d060951 100644 --- a/docs/models/components/linkschema.md +++ b/docs/models/components/linkschema.md @@ -8,30 +8,30 @@ | `id` | *str* | :heavy_check_mark: | The unique ID of the short link. | | `domain` | *str* | :heavy_check_mark: | The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains). | | `key` | *str* | :heavy_check_mark: | The short link slug. If not provided, a random 7-character slug will be generated. | -| `external_id` | *Optional[str]* | :heavy_check_mark: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with 'ext_' when passed as a query parameter. | +| `external_id` | *Nullable[str]* | :heavy_check_mark: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with 'ext_' when passed as a query parameter. | | `url` | *str* | :heavy_check_mark: | The destination URL of the short link. | -| `expires_at` | *Optional[str]* | :heavy_check_mark: | The date and time when the short link will expire in ISO-8601 format. | -| `expired_url` | *Optional[str]* | :heavy_check_mark: | The URL to redirect to when the short link has expired. | -| `password` | *Optional[str]* | :heavy_check_mark: | The password required to access the destination URL of the short link. 
| -| `title` | *Optional[str]* | :heavy_check_mark: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | -| `description` | *Optional[str]* | :heavy_check_mark: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | -| `image` | *Optional[str]* | :heavy_check_mark: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | -| `ios` | *Optional[str]* | :heavy_check_mark: | The iOS destination URL for the short link for iOS device targeting. | -| `android` | *Optional[str]* | :heavy_check_mark: | The Android destination URL for the short link for Android device targeting. | -| `geo` | [Optional[components.Geo]](../../models/components/geo.md) | :heavy_check_mark: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. Learn more: https://d.to/geo | -| ~~`tag_id`~~ | *Optional[str]* | :heavy_check_mark: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tags` instead. | +| `expires_at` | *Nullable[str]* | :heavy_check_mark: | The date and time when the short link will expire in ISO-8601 format. | +| `expired_url` | *Nullable[str]* | :heavy_check_mark: | The URL to redirect to when the short link has expired. | +| `password` | *Nullable[str]* | :heavy_check_mark: | The password required to access the destination URL of the short link. | +| `title` | *Nullable[str]* | :heavy_check_mark: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | +| `description` | *Nullable[str]* | :heavy_check_mark: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | +| `image` | *Nullable[str]* | :heavy_check_mark: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | +| `ios` | *Nullable[str]* | :heavy_check_mark: | The iOS destination URL for the short link for iOS device targeting. | +| `android` | *Nullable[str]* | :heavy_check_mark: | The Android destination URL for the short link for Android device targeting. | +| `geo` | [Nullable[components.Geo]](../../models/components/geo.md) | :heavy_check_mark: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. Learn more: https://d.to/geo | +| ~~`tag_id`~~ | *Nullable[str]* | :heavy_check_mark: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tags` instead. | | `tags` | List[[components.TagSchema](../../models/components/tagschema.md)] | :heavy_check_mark: | The tags assigned to the short link. | -| `comments` | *Optional[str]* | :heavy_check_mark: | The comments for the short link. | +| `comments` | *Nullable[str]* | :heavy_check_mark: | The comments for the short link. | | `short_link` | *str* | :heavy_check_mark: | The full URL of the short link, including the https protocol (e.g. `https://dub.sh/try`). | | `qr_code` | *str* | :heavy_check_mark: | The full URL of the QR code for the short link (e.g. `https://api.dub.co/qr?url=https://dub.sh/try`). | -| `utm_source` | *Optional[str]* | :heavy_check_mark: | The UTM source of the short link. | -| `utm_medium` | *Optional[str]* | :heavy_check_mark: | The UTM medium of the short link. | -| `utm_campaign` | *Optional[str]* | :heavy_check_mark: | The UTM campaign of the short link. | -| `utm_term` | *Optional[str]* | :heavy_check_mark: | The UTM term of the short link. | -| `utm_content` | *Optional[str]* | :heavy_check_mark: | The UTM content of the short link. | +| `utm_source` | *Nullable[str]* | :heavy_check_mark: | The UTM source of the short link. | +| `utm_medium` | *Nullable[str]* | :heavy_check_mark: | The UTM medium of the short link. | +| `utm_campaign` | *Nullable[str]* | :heavy_check_mark: | The UTM campaign of the short link. | +| `utm_term` | *Nullable[str]* | :heavy_check_mark: | The UTM term of the short link. | +| `utm_content` | *Nullable[str]* | :heavy_check_mark: | The UTM content of the short link. | | `user_id` | *str* | :heavy_check_mark: | The user ID of the creator of the short link. | | `workspace_id` | *str* | :heavy_check_mark: | The workspace ID of the short link. | -| `last_clicked` | *Optional[str]* | :heavy_check_mark: | The date and time when the short link was last clicked. | +| `last_clicked` | *Nullable[str]* | :heavy_check_mark: | The date and time when the short link was last clicked. | | `created_at` | *str* | :heavy_check_mark: | The date and time when the short link was created. | | `updated_at` | *str* | :heavy_check_mark: | The date and time when the short link was last updated. | | ~~`project_id`~~ | *str* | :heavy_check_mark: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The project ID of the short link. This field is deprecated – use `workspaceId` instead. | diff --git a/docs/models/components/workspaceschema.md b/docs/models/components/workspaceschema.md index beaad1f..3b04e9e 100644 --- a/docs/models/components/workspaceschema.md +++ b/docs/models/components/workspaceschema.md @@ -16,12 +16,12 @@ | `tags_limit` | *float* | :heavy_check_mark: | The tags limit of the workspace. | | `users_limit` | *float* | :heavy_check_mark: | The users limit of the workspace. | | `plan` | [components.Plan](../../models/components/plan.md) | :heavy_check_mark: | The plan of the workspace. | -| `stripe_id` | *Optional[str]* | :heavy_check_mark: | The Stripe ID of the workspace. | +| `stripe_id` | *Nullable[str]* | :heavy_check_mark: | The Stripe ID of the workspace. | | `billing_cycle_start` | *float* | :heavy_check_mark: | The date and time when the billing cycle starts for the workspace. | -| `stripe_connect_id` | *Optional[str]* | :heavy_check_mark: | [BETA]: The Stripe Connect ID of the workspace. | +| `stripe_connect_id` | *Nullable[str]* | :heavy_check_mark: | [BETA]: The Stripe Connect ID of the workspace. | | `created_at` | *str* | :heavy_check_mark: | The date and time when the workspace was created. | | `users` | List[[components.Users](../../models/components/users.md)] | :heavy_check_mark: | The role of the authenticated user in the workspace. | | `domains` | List[[components.Domains](../../models/components/domains.md)] | :heavy_check_mark: | The domains of the workspace. | -| `invite_code` | *Optional[str]* | :heavy_check_mark: | The invite code of the workspace. | -| `logo` | *Optional[str]* | :heavy_minus_sign: | The logo of the workspace. | +| `invite_code` | *Nullable[str]* | :heavy_check_mark: | The invite code of the workspace. | +| `logo` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The logo of the workspace. | | `beta_tester` | *Optional[bool]* | :heavy_minus_sign: | Whether the workspace is enrolled in the beta testing program. | \ No newline at end of file diff --git a/docs/models/operations/createdomainrequestbody.md b/docs/models/operations/createdomainrequestbody.md index c9f85db..aa9cdca 100644 --- a/docs/models/operations/createdomainrequestbody.md +++ b/docs/models/operations/createdomainrequestbody.md @@ -6,6 +6,6 @@ | Field | Type | Required | Description | Example | | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | | `slug` | *str* | :heavy_check_mark: | Name of the domain. | acme.com | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | Redirect users to a specific URL when any link under this domain has expired. | https://acme.com/expired | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Redirect users to a specific URL when any link under this domain has expired. | https://acme.com/expired | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether to archive this domain. `false` will unarchive a previously archived domain. 
| false | -| `placeholder` | *Optional[str]* | :heavy_minus_sign: | Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened. | https://dub.co/help/article/what-is-dub | \ No newline at end of file +| `placeholder` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened. | https://dub.co/help/article/what-is-dub | \ No newline at end of file diff --git a/docs/models/operations/createlinkrequestbody.md b/docs/models/operations/createlinkrequestbody.md index 607ae42..7bad1eb 100644 --- a/docs/models/operations/createlinkrequestbody.md +++ b/docs/models/operations/createlinkrequestbody.md @@ -8,24 +8,24 @@ | `url` | *str* | :heavy_check_mark: | The destination URL of the short link. | https://google/com | | `domain` | *Optional[str]* | :heavy_minus_sign: | The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains). | | | `key` | *Optional[str]* | :heavy_minus_sign: | The short link slug. If not provided, a random 7-character slug will be generated. | | -| `external_id` | *Optional[str]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | +| `external_id` | *Optional[Nullable[str]]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | | `prefix` | *Optional[str]* | :heavy_minus_sign: | The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided. | | | `track_conversion` | *Optional[bool]* | :heavy_minus_sign: | Whether to track conversions for the short link. | | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link is archived. | | | `public_stats` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link's stats are publicly accessible. | | -| ~~`tag_id`~~ | *Optional[str]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | +| ~~`tag_id`~~ | *Optional[Nullable[str]]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | | `tag_ids` | [Optional[operations.TagIds]](../../models/operations/tagids.md) | :heavy_minus_sign: | The unique IDs of the tags assigned to the short link. | [
"clux0rgak00011..."
] | | `tag_names` | [Optional[operations.TagNames]](../../models/operations/tagnames.md) | :heavy_minus_sign: | The unique name of the tags assigned to the short link (case insensitive). | | -| `comments` | *Optional[str]* | :heavy_minus_sign: | The comments for the short link. | | -| `expires_at` | *Optional[str]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | -| `password` | *Optional[str]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | +| `comments` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The comments for the short link. | | +| `expires_at` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | +| `password` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | | `proxy` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses Custom Social Media Cards feature. | | -| `title` | *Optional[str]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `description` | *Optional[str]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `image` | *Optional[str]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `title` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `description` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `image` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | | `rewrite` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses link cloaking. | | -| `ios` | *Optional[str]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | -| `android` | *Optional[str]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | -| `geo` | [Optional[components.LinkGeoTargeting]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. | | +| `ios` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | +| `android` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | +| `geo` | [Optional[Nullable[components.LinkGeoTargeting]]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. 
| | | `do_index` | *Optional[bool]* | :heavy_minus_sign: | Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex | | \ No newline at end of file diff --git a/docs/models/operations/data.md b/docs/models/operations/data.md index 5a97a4b..ace77ef 100644 --- a/docs/models/operations/data.md +++ b/docs/models/operations/data.md @@ -9,19 +9,19 @@ | `track_conversion` | *Optional[bool]* | :heavy_minus_sign: | Whether to track conversions for the short link. | | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link is archived. | | | `public_stats` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link's stats are publicly accessible. | | -| ~~`tag_id`~~ | *Optional[str]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | +| ~~`tag_id`~~ | *Optional[Nullable[str]]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | | `tag_ids` | [Optional[operations.BulkUpdateLinksTagIds]](../../models/operations/bulkupdatelinkstagids.md) | :heavy_minus_sign: | The unique IDs of the tags assigned to the short link. | [
"clux0rgak00011..."
] | | `tag_names` | [Optional[operations.BulkUpdateLinksTagNames]](../../models/operations/bulkupdatelinkstagnames.md) | :heavy_minus_sign: | The unique name of the tags assigned to the short link (case insensitive). | | -| `comments` | *Optional[str]* | :heavy_minus_sign: | The comments for the short link. | | -| `expires_at` | *Optional[str]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | -| `password` | *Optional[str]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | +| `comments` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The comments for the short link. | | +| `expires_at` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | +| `password` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | | `proxy` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses Custom Social Media Cards feature. | | -| `title` | *Optional[str]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `description` | *Optional[str]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `image` | *Optional[str]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `title` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `description` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `image` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | | `rewrite` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses link cloaking. | | -| `ios` | *Optional[str]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | -| `android` | *Optional[str]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | -| `geo` | [Optional[components.LinkGeoTargeting]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. | | +| `ios` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | +| `android` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | +| `geo` | [Optional[Nullable[components.LinkGeoTargeting]]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. 
| | | `do_index` | *Optional[bool]* | :heavy_minus_sign: | Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex | | \ No newline at end of file diff --git a/docs/models/operations/getmetatagsresponsebody.md b/docs/models/operations/getmetatagsresponsebody.md index c23d341..91b71e6 100644 --- a/docs/models/operations/getmetatagsresponsebody.md +++ b/docs/models/operations/getmetatagsresponsebody.md @@ -7,6 +7,6 @@ The retrieved metatags | Field | Type | Required | Description | Example | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| `title` | *Optional[str]* | :heavy_check_mark: | The meta title tag for the URL. | Dub.co - Link Management for Modern Marketing Teams | -| `description` | *Optional[str]* | :heavy_check_mark: | The meta description tag for the URL. | Dub.co is the open-source link management infrastructure ... | -| `image` | *Optional[str]* | :heavy_check_mark: | The OpenGraph image for the URL. | https://assets.dub.co/thumbnail.jpg | \ No newline at end of file +| `title` | *Nullable[str]* | :heavy_check_mark: | The meta title tag for the URL. | Dub.co - Link Management for Modern Marketing Teams | +| `description` | *Nullable[str]* | :heavy_check_mark: | The meta description tag for the URL. | Dub.co is the open-source link management infrastructure ... | +| `image` | *Nullable[str]* | :heavy_check_mark: | The OpenGraph image for the URL. | https://assets.dub.co/thumbnail.jpg | \ No newline at end of file diff --git a/docs/models/operations/requestbody.md b/docs/models/operations/requestbody.md index 260e06e..ba67818 100644 --- a/docs/models/operations/requestbody.md +++ b/docs/models/operations/requestbody.md @@ -8,24 +8,24 @@ | `url` | *str* | :heavy_check_mark: | The destination URL of the short link. | https://google/com | | `domain` | *Optional[str]* | :heavy_minus_sign: | The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains). | | | `key` | *Optional[str]* | :heavy_minus_sign: | The short link slug. If not provided, a random 7-character slug will be generated. | | -| `external_id` | *Optional[str]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | +| `external_id` | *Optional[Nullable[str]]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | | `prefix` | *Optional[str]* | :heavy_minus_sign: | The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided. | | | `track_conversion` | *Optional[bool]* | :heavy_minus_sign: | Whether to track conversions for the short link. | | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link is archived. | | | `public_stats` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link's stats are publicly accessible. 
| | -| ~~`tag_id`~~ | *Optional[str]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | +| ~~`tag_id`~~ | *Optional[Nullable[str]]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | | `tag_ids` | [Optional[operations.BulkCreateLinksTagIds]](../../models/operations/bulkcreatelinkstagids.md) | :heavy_minus_sign: | The unique IDs of the tags assigned to the short link. | [
"clux0rgak00011..."
] | | `tag_names` | [Optional[operations.BulkCreateLinksTagNames]](../../models/operations/bulkcreatelinkstagnames.md) | :heavy_minus_sign: | The unique name of the tags assigned to the short link (case insensitive). | | -| `comments` | *Optional[str]* | :heavy_minus_sign: | The comments for the short link. | | -| `expires_at` | *Optional[str]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | -| `password` | *Optional[str]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | +| `comments` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The comments for the short link. | | +| `expires_at` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | +| `password` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | | `proxy` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses Custom Social Media Cards feature. | | -| `title` | *Optional[str]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `description` | *Optional[str]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `image` | *Optional[str]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `title` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `description` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `image` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | | `rewrite` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses link cloaking. | | -| `ios` | *Optional[str]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | -| `android` | *Optional[str]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | -| `geo` | [Optional[components.LinkGeoTargeting]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. | | +| `ios` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | +| `android` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | +| `geo` | [Optional[Nullable[components.LinkGeoTargeting]]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. 
| | | `do_index` | *Optional[bool]* | :heavy_minus_sign: | Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex | | \ No newline at end of file diff --git a/docs/models/operations/trackcustomerresponsebody.md b/docs/models/operations/trackcustomerresponsebody.md index fca00b3..b535301 100644 --- a/docs/models/operations/trackcustomerresponsebody.md +++ b/docs/models/operations/trackcustomerresponsebody.md @@ -8,6 +8,6 @@ A customer was tracked. | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | | `customer_id` | *str* | :heavy_check_mark: | N/A | -| `customer_name` | *Optional[str]* | :heavy_check_mark: | N/A | -| `customer_email` | *Optional[str]* | :heavy_check_mark: | N/A | -| `customer_avatar` | *Optional[str]* | :heavy_check_mark: | N/A | \ No newline at end of file +| `customer_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `customer_email` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `customer_avatar` | *Nullable[str]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/operations/trackleadrequestbody.md b/docs/models/operations/trackleadrequestbody.md index 7052d34..e0bac7b 100644 --- a/docs/models/operations/trackleadrequestbody.md +++ b/docs/models/operations/trackleadrequestbody.md @@ -8,7 +8,7 @@ | `click_id` | *str* | :heavy_check_mark: | The ID of the click in th Dub. You can read this value from `dclid` cookie. | | | `event_name` | *str* | :heavy_check_mark: | The name of the event to track. | Sign up | | `customer_id` | *str* | :heavy_check_mark: | This is the unique identifier for the customer in the client's app. This is used to track the customer's journey. | | -| `customer_name` | *Optional[str]* | :heavy_minus_sign: | Name of the customer in the client's app. | | -| `customer_email` | *Optional[str]* | :heavy_minus_sign: | Email of the customer in the client's app. | | -| `customer_avatar` | *Optional[str]* | :heavy_minus_sign: | Avatar of the customer in the client's app. | | +| `customer_name` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Name of the customer in the client's app. | | +| `customer_email` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Email of the customer in the client's app. | | +| `customer_avatar` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Avatar of the customer in the client's app. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Additional metadata to be stored with the lead event | | \ No newline at end of file diff --git a/docs/models/operations/trackleadresponsebody.md b/docs/models/operations/trackleadresponsebody.md index 62d3358..6ac1f31 100644 --- a/docs/models/operations/trackleadresponsebody.md +++ b/docs/models/operations/trackleadresponsebody.md @@ -10,7 +10,7 @@ A lead was tracked. 
| `click_id` | *str* | :heavy_check_mark: | N/A | | `event_name` | *str* | :heavy_check_mark: | N/A | | `customer_id` | *str* | :heavy_check_mark: | N/A | -| `customer_name` | *Optional[str]* | :heavy_check_mark: | N/A | -| `customer_email` | *Optional[str]* | :heavy_check_mark: | N/A | -| `customer_avatar` | *Optional[str]* | :heavy_check_mark: | N/A | +| `customer_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `customer_email` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `customer_avatar` | *Nullable[str]* | :heavy_check_mark: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/tracksalerequestbody.md b/docs/models/operations/tracksalerequestbody.md index d546035..96de44e 100644 --- a/docs/models/operations/tracksalerequestbody.md +++ b/docs/models/operations/tracksalerequestbody.md @@ -9,6 +9,6 @@ | `amount` | *int* | :heavy_check_mark: | The amount of the sale. Should be passed in cents. | | | `payment_processor` | [operations.PaymentProcessor](../../models/operations/paymentprocessor.md) | :heavy_check_mark: | The payment processor via which the sale was made. | | | `event_name` | *Optional[str]* | :heavy_minus_sign: | The name of the sale event. It can be used to track different types of event for example 'Purchase', 'Upgrade', 'Payment', etc. | Purchase | -| `invoice_id` | *Optional[str]* | :heavy_minus_sign: | The invoice ID of the sale. | | +| `invoice_id` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The invoice ID of the sale. | | | `currency` | *Optional[str]* | :heavy_minus_sign: | The currency of the sale. Accepts ISO 4217 currency codes. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Additional metadata to be stored with the sale event. | | \ No newline at end of file diff --git a/docs/models/operations/tracksaleresponsebody.md b/docs/models/operations/tracksaleresponsebody.md index b71cf01..fa50212 100644 --- a/docs/models/operations/tracksaleresponsebody.md +++ b/docs/models/operations/tracksaleresponsebody.md @@ -11,6 +11,6 @@ A sale was tracked. | `customer_id` | *str* | :heavy_check_mark: | N/A | | `amount` | *float* | :heavy_check_mark: | N/A | | `payment_processor` | *str* | :heavy_check_mark: | N/A | -| `invoice_id` | *Optional[str]* | :heavy_check_mark: | N/A | +| `invoice_id` | *Nullable[str]* | :heavy_check_mark: | N/A | | `currency` | *str* | :heavy_check_mark: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/operations/updatedomainrequestbody.md b/docs/models/operations/updatedomainrequestbody.md index a8e2833..c895abc 100644 --- a/docs/models/operations/updatedomainrequestbody.md +++ b/docs/models/operations/updatedomainrequestbody.md @@ -6,6 +6,6 @@ | Field | Type | Required | Description | Example | | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | | `slug` | *Optional[str]* | :heavy_minus_sign: | Name of the domain. 
| acme.com | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | Redirect users to a specific URL when any link under this domain has expired. | https://acme.com/expired | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Redirect users to a specific URL when any link under this domain has expired. | https://acme.com/expired | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether to archive this domain. `false` will unarchive a previously archived domain. | false | -| `placeholder` | *Optional[str]* | :heavy_minus_sign: | Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened. | https://dub.co/help/article/what-is-dub | \ No newline at end of file +| `placeholder` | *Optional[Nullable[str]]* | :heavy_minus_sign: | Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened. | https://dub.co/help/article/what-is-dub | \ No newline at end of file diff --git a/docs/models/operations/updatelinkrequestbody.md b/docs/models/operations/updatelinkrequestbody.md index 2b33a85..bbba4cf 100644 --- a/docs/models/operations/updatelinkrequestbody.md +++ b/docs/models/operations/updatelinkrequestbody.md @@ -5,27 +5,27 @@ | Field | Type | Required | Description | Example | | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `url` | *str* | :heavy_check_mark: | The destination URL of the short link. | https://google/com | +| `url` | *Optional[str]* | :heavy_minus_sign: | The destination URL of the short link. | https://google/com | | `domain` | *Optional[str]* | :heavy_minus_sign: | The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains). | | | `key` | *Optional[str]* | :heavy_minus_sign: | The short link slug. If not provided, a random 7-character slug will be generated. | | -| `external_id` | *Optional[str]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | +| `external_id` | *Optional[Nullable[str]]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. 
| 123456 | | `prefix` | *Optional[str]* | :heavy_minus_sign: | The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided. | | | `track_conversion` | *Optional[bool]* | :heavy_minus_sign: | Whether to track conversions for the short link. | | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link is archived. | | | `public_stats` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link's stats are publicly accessible. | | -| ~~`tag_id`~~ | *Optional[str]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | +| ~~`tag_id`~~ | *Optional[Nullable[str]]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | | `tag_ids` | [Optional[operations.UpdateLinkTagIds]](../../models/operations/updatelinktagids.md) | :heavy_minus_sign: | The unique IDs of the tags assigned to the short link. | [
"clux0rgak00011..."
] | | `tag_names` | [Optional[operations.UpdateLinkTagNames]](../../models/operations/updatelinktagnames.md) | :heavy_minus_sign: | The unique name of the tags assigned to the short link (case insensitive). | | -| `comments` | *Optional[str]* | :heavy_minus_sign: | The comments for the short link. | | -| `expires_at` | *Optional[str]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | -| `password` | *Optional[str]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | +| `comments` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The comments for the short link. | | +| `expires_at` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | +| `password` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | | `proxy` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses Custom Social Media Cards feature. | | -| `title` | *Optional[str]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `description` | *Optional[str]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `image` | *Optional[str]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `title` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `description` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `image` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | | `rewrite` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses link cloaking. | | -| `ios` | *Optional[str]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | -| `android` | *Optional[str]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | -| `geo` | [Optional[components.LinkGeoTargeting]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. | | +| `ios` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | +| `android` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | +| `geo` | [Optional[Nullable[components.LinkGeoTargeting]]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | N/A | | | `do_index` | *Optional[bool]* | :heavy_minus_sign: | Allow search engines to index your short link. 
Defaults to `false` if not provided. Learn more: https://d.to/noindex | | \ No newline at end of file diff --git a/docs/models/operations/updatetagrequest.md b/docs/models/operations/updatetagrequest.md index d57da08..4d55325 100644 --- a/docs/models/operations/updatetagrequest.md +++ b/docs/models/operations/updatetagrequest.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -| `id` | *str* | :heavy_check_mark: | The ID of the tag | +| `id` | *str* | :heavy_check_mark: | The ID of the tag to update. | | `request_body` | [Optional[operations.UpdateTagRequestBody]](../../models/operations/updatetagrequestbody.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/updateworkspacerequest.md b/docs/models/operations/updateworkspacerequest.md index 936145e..4aa2f9e 100644 --- a/docs/models/operations/updateworkspacerequest.md +++ b/docs/models/operations/updateworkspacerequest.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------- | -------------------------------- | -------------------------------- | -------------------------------- | -| `id_or_slug` | *str* | :heavy_check_mark: | The ID or slug of the workspace. | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `id_or_slug` | *str* | :heavy_check_mark: | The ID or slug of the workspace to update. | +| `request_body` | [Optional[operations.UpdateWorkspaceRequestBody]](../../models/operations/updateworkspacerequestbody.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/updateworkspacerequestbody.md b/docs/models/operations/updateworkspacerequestbody.md new file mode 100644 index 0000000..1ad891e --- /dev/null +++ b/docs/models/operations/updateworkspacerequestbody.md @@ -0,0 +1,9 @@ +# UpdateWorkspaceRequestBody + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `name` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `slug` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/upsertlinkrequestbody.md b/docs/models/operations/upsertlinkrequestbody.md index 4473d7f..2149746 100644 --- a/docs/models/operations/upsertlinkrequestbody.md +++ b/docs/models/operations/upsertlinkrequestbody.md @@ -8,24 +8,24 @@ | `url` | *str* | :heavy_check_mark: | The destination URL of the short link. | https://google/com | | `domain` | *Optional[str]* | :heavy_minus_sign: | The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains). 
| | | `key` | *Optional[str]* | :heavy_minus_sign: | The short link slug. If not provided, a random 7-character slug will be generated. | | -| `external_id` | *Optional[str]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | +| `external_id` | *Optional[Nullable[str]]* | :heavy_minus_sign: | This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter. | 123456 | | `prefix` | *Optional[str]* | :heavy_minus_sign: | The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided. | | | `track_conversion` | *Optional[bool]* | :heavy_minus_sign: | Whether to track conversions for the short link. | | | `archived` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link is archived. | | | `public_stats` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link's stats are publicly accessible. | | -| ~~`tag_id`~~ | *Optional[str]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | +| ~~`tag_id`~~ | *Optional[Nullable[str]]* | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. | | | `tag_ids` | [Optional[operations.UpsertLinkTagIds]](../../models/operations/upsertlinktagids.md) | :heavy_minus_sign: | The unique IDs of the tags assigned to the short link. | [
"clux0rgak00011..."
] | | `tag_names` | [Optional[operations.UpsertLinkTagNames]](../../models/operations/upsertlinktagnames.md) | :heavy_minus_sign: | The unique name of the tags assigned to the short link (case insensitive). | | -| `comments` | *Optional[str]* | :heavy_minus_sign: | The comments for the short link. | | -| `expires_at` | *Optional[str]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | -| `expired_url` | *Optional[str]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | -| `password` | *Optional[str]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | +| `comments` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The comments for the short link. | | +| `expires_at` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The date and time when the short link will expire at. | | +| `expired_url` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The URL to redirect to when the short link has expired. | | +| `password` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The password required to access the destination URL of the short link. | | | `proxy` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses Custom Social Media Cards feature. | | -| `title` | *Optional[str]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `description` | *Optional[str]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | -| `image` | *Optional[str]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `title` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `description` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | +| `image` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true. | | | `rewrite` | *Optional[bool]* | :heavy_minus_sign: | Whether the short link uses link cloaking. | | -| `ios` | *Optional[str]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | -| `android` | *Optional[str]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | -| `geo` | [Optional[components.LinkGeoTargeting]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. | | +| `ios` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The iOS destination URL for the short link for iOS device targeting. | | +| `android` | *Optional[Nullable[str]]* | :heavy_minus_sign: | The Android destination URL for the short link for Android device targeting. | | +| `geo` | [Optional[Nullable[components.LinkGeoTargeting]]](../../models/components/linkgeotargeting.md) | :heavy_minus_sign: | Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. 
| | | `do_index` | *Optional[bool]* | :heavy_minus_sign: | Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex | | \ No newline at end of file diff --git a/docs/sdks/analytics/README.md b/docs/sdks/analytics/README.md index df8f0dd..303e0d8 100644 --- a/docs/sdks/analytics/README.md +++ b/docs/sdks/analytics/README.md @@ -12,22 +12,21 @@ Retrieve analytics for a link, a domain, or the authenticated workspace. The res ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.analytics.retrieve(request=operations.RetrieveAnalyticsRequest( - timezone='America/New_York', - city='New York', - device='Desktop', - browser='Chrome', - os='Windows', - referer='google.com', -)) +res = s.analytics.retrieve(request={ + "timezone": "America/New_York", + "city": "New York", + "device": "Desktop", + "browser": "Chrome", + "os": "Windows", + "referer": "google.com", +}) if res is not None: # handle response diff --git a/docs/sdks/domains/README.md b/docs/sdks/domains/README.md index 520473c..80cfe89 100644 --- a/docs/sdks/domains/README.md +++ b/docs/sdks/domains/README.md @@ -15,15 +15,14 @@ Retrieve a list of domains associated with the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.domains.list(request=operations.ListDomainsRequest()) +res = s.domains.list() if res is not None: # handle response @@ -31,12 +30,6 @@ if res is not None: ``` -### Parameters - -| Parameter | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `request` | [operations.ListDomainsRequest](../../models/operations/listdomainsrequest.md) | :heavy_check_mark: | The request object to use for the request. | - ### Response @@ -63,20 +56,19 @@ Create a domain for the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.domains.create(request=operations.CreateDomainRequestBody( - slug='acme.com', - expired_url='https://acme.com/expired', - archived=False, - placeholder='https://dub.co/help/article/what-is-dub', -)) +res = s.domains.create(request={ + "slug": "acme.com", + "expired_url": "https://acme.com/expired", + "archived": False, + "placeholder": "https://dub.co/help/article/what-is-dub", +}) if res is not None: # handle response @@ -116,14 +108,14 @@ Delete a domain from a workspace. It cannot be undone. This will also delete all ### Example Usage ```python -import dub +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.domains.delete(slug='acme.com') +res = s.domains.delete(slug="acme.com") if res is not None: # handle response @@ -163,20 +155,19 @@ Update a domain for the authenticated workspace. 
### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.domains.update(slug='acme.com', request_body=operations.UpdateDomainRequestBody( - slug='acme.com', - expired_url='https://acme.com/expired', - archived=False, - placeholder='https://dub.co/help/article/what-is-dub', -)) +res = s.domains.update(slug="acme.com", request_body={ + "slug": "acme.com", + "expired_url": "https://acme.com/expired", + "archived": False, + "placeholder": "https://dub.co/help/article/what-is-dub", +}) if res is not None: # handle response diff --git a/docs/sdks/links/README.md b/docs/sdks/links/README.md index 145e12e..1a22272 100644 --- a/docs/sdks/links/README.md +++ b/docs/sdks/links/README.md @@ -20,15 +20,14 @@ Retrieve a paginated list of links for the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.list(request=operations.GetLinksRequest()) +res = s.links.list(request={}) if res is not None: # handle response @@ -68,21 +67,20 @@ Create a new link for the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.create(request=operations.CreateLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.create(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response @@ -122,15 +120,14 @@ Retrieve the number of links for the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.count(request=operations.GetLinksCountRequest()) +res = s.links.count(request={}) if res is not None: # handle response @@ -170,18 +167,17 @@ Retrieve the info for a link. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.get(request=operations.GetLinkInfoRequest( - link_id='clux0rgak00011...', - external_id='ext_123456', -)) +res = s.links.get(request={ + "link_id": "clux0rgak00011...", + "external_id": "ext_123456", +}) if res is not None: # handle response @@ -221,14 +217,14 @@ Delete a link for the authenticated workspace. ### Example Usage ```python -import dub +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.delete(link_id='') +res = s.links.delete(link_id="") if res is not None: # handle response @@ -268,21 +264,20 @@ Update a link for the authenticated workspace. If there's no change, returns it ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.update(link_id='', request_body=operations.UpdateLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.update(link_id="", request_body={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response @@ -323,22 +318,21 @@ Bulk create up to 100 links for the authenticated workspace. 
### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) res = s.links.create_many(request=[ - operations.RequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', + { + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], - ), + }, ]) if res is not None: @@ -379,25 +373,24 @@ Bulk update up to 100 links with the same data for the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.update_many(request=operations.BulkUpdateLinksRequestBody( - link_ids=[ - '', +res = s.links.update_many(request={ + "link_ids": [ + "", ], - data=operations.Data( - url='https://google/com', - tag_ids=[ - 'clux0rgak00011...', + "data": { + "url": "https://google/com", + "tag_ids": [ + "clux0rgak00011...", ], - ), -)) + }, +}) if res is not None: # handle response @@ -437,21 +430,20 @@ Upsert a link for the authenticated workspace by its URL. If a link with the sam ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.links.upsert(request=operations.UpsertLinkRequestBody( - url='https://google/com', - external_id='123456', - tag_ids=[ - 'clux0rgak00011...', +res = s.links.upsert(request={ + "url": "https://google/com", + "external_id": "123456", + "tag_ids": [ + "clux0rgak00011...", ], -)) +}) if res is not None: # handle response diff --git a/docs/sdks/metatags/README.md b/docs/sdks/metatags/README.md index 88b7410..3482cb0 100644 --- a/docs/sdks/metatags/README.md +++ b/docs/sdks/metatags/README.md @@ -12,17 +12,16 @@ Retrieve the metatags for a URL. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.metatags.get(request=operations.GetMetatagsRequest( - url='https://dub.co', -)) +res = s.metatags.get(request={ + "url": "https://dub.co", +}) if res is not None: # handle response diff --git a/docs/sdks/qrcodes/README.md b/docs/sdks/qrcodes/README.md index 64eed9a..41ce602 100644 --- a/docs/sdks/qrcodes/README.md +++ b/docs/sdks/qrcodes/README.md @@ -12,17 +12,16 @@ Retrieve a QR code for a link. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.qr_codes.get(request=operations.GetQRCodeRequest( - url='https://brief-micronutrient.org', -)) +res = s.qr_codes.get(request={ + "url": "https://brief-micronutrient.org", +}) if res is not None: # handle response diff --git a/docs/sdks/tags/README.md b/docs/sdks/tags/README.md index 7687eec..65b30b4 100644 --- a/docs/sdks/tags/README.md +++ b/docs/sdks/tags/README.md @@ -14,15 +14,14 @@ Retrieve a list of tags for the authenticated workspace. 
### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.tags.list(request=operations.GetTagsRequest()) +res = s.tags.list() if res is not None: # handle response @@ -30,12 +29,6 @@ if res is not None: ``` -### Parameters - -| Parameter | Type | Required | Description | -| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | -| `request` | [operations.GetTagsRequest](../../models/operations/gettagsrequest.md) | :heavy_check_mark: | The request object to use for the request. | - ### Response @@ -62,15 +55,14 @@ Create a new tag for the authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.tags.create(request=operations.CreateTagRequestBody()) +res = s.tags.create(request={}) if res is not None: # handle response @@ -110,17 +102,14 @@ Update a tag in the workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.tags.update(request=operations.UpdateTagRequest( - id='', -)) +res = s.tags.update(id="", request_body={}) if res is not None: # handle response @@ -130,9 +119,10 @@ if res is not None: ### Parameters -| Parameter | Type | Required | Description | -| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -| `request` | [operations.UpdateTagRequest](../../models/operations/updatetagrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | +| `id` | *str* | :heavy_check_mark: | The ID of the tag to update. | +| `request_body` | [Optional[operations.UpdateTagRequestBody]](../../models/operations/updatetagrequestbody.md) | :heavy_minus_sign: | N/A | ### Response diff --git a/docs/sdks/track/README.md b/docs/sdks/track/README.md index c5f247a..270a481 100644 --- a/docs/sdks/track/README.md +++ b/docs/sdks/track/README.md @@ -14,19 +14,18 @@ Track a lead for a short link. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.track.lead(request=operations.TrackLeadRequestBody( - click_id='', - event_name='Sign up', - customer_id='', -)) +res = s.track.lead(request={ + "click_id": "", + "event_name": "Sign up", + "customer_id": "", +}) if res is not None: # handle response @@ -66,20 +65,20 @@ Track a sale for a short link. 
### Example Usage ```python -import dub +from dub import Dub from dub.models import operations -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.track.sale(request=operations.TrackSaleRequestBody( - customer_id='', - amount=996500, - payment_processor=operations.PaymentProcessor.SHOPIFY, - event_name='Purchase', -)) +res = s.track.sale(request={ + "customer_id": "", + "amount": 996500, + "payment_processor": operations.PaymentProcessor.SHOPIFY, + "event_name": "Purchase", +}) if res is not None: # handle response @@ -119,17 +118,16 @@ Track a customer for an authenticated workspace. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.track.customer(request=operations.TrackCustomerRequestBody( - customer_id='', -)) +res = s.track.customer(request={ + "customer_id": "", +}) if res is not None: # handle response diff --git a/docs/sdks/workspaces/README.md b/docs/sdks/workspaces/README.md index 52f3559..6971822 100644 --- a/docs/sdks/workspaces/README.md +++ b/docs/sdks/workspaces/README.md @@ -13,17 +13,16 @@ Retrieve a workspace for the authenticated user. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.workspaces.get(request=operations.GetWorkspaceRequest( - id_or_slug='', -)) +res = s.workspaces.get(request={ + "id_or_slug": "", +}) if res is not None: # handle response @@ -63,17 +62,14 @@ Update a workspace by ID or slug. ### Example Usage ```python -import dub -from dub.models import operations +from dub import Dub -s = dub.Dub( +s = Dub( token="DUB_API_KEY", ) -res = s.workspaces.update(request=operations.UpdateWorkspaceRequest( - id_or_slug='', -)) +res = s.workspaces.update(id_or_slug="", request_body={}) if res is not None: # handle response @@ -83,9 +79,10 @@ if res is not None: ### Parameters -| Parameter | Type | Required | Description | -| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -| `request` | [operations.UpdateWorkspaceRequest](../../models/operations/updateworkspacerequest.md) | :heavy_check_mark: | The request object to use for the request. | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `id_or_slug` | *str* | :heavy_check_mark: | The ID or slug of the workspace to update. | +| `request_body` | [Optional[operations.UpdateWorkspaceRequestBody]](../../models/operations/updateworkspacerequestbody.md) | :heavy_minus_sign: | N/A | ### Response diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..e1cd91b --- /dev/null +++ b/poetry.lock @@ -0,0 +1,461 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "astroid" +version = "3.2.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, + {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "exceptiongroup" +version = "1.2.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jsonpath-python" +version = "1.0.6" +description = "A more powerful JSONPath implementation in modern python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, + {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pydantic" +version = "2.7.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.18.4" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = 
"pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = 
"pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, + {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, +] + +[package.dependencies] 
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pylint" +version = "3.2.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, + {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, +] + +[package.dependencies] +astroid = ">=3.2.2,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.5" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file 
= "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "d74e5c2487dc3b66f74055deefb72f2bdd00221c47f81ea872470d5603de35ca" diff --git a/pylintrc b/pylintrc index 57cd73b..856e918 100644 --- a/pylintrc +++ b/pylintrc @@ -59,10 +59,11 @@ ignore-paths= # Emacs file locks ignore-patterns=^\.# -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as @@ -93,6 +94,12 @@ py-version=3.8 # Discover python modules and packages in the file system subtree. recursive=no +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots=src + # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes @@ -473,6 +480,10 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +typealias-rgx=.* + # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. #typevar-rgx= @@ -495,15 +506,12 @@ check-protected-access-in-special-methods=no defining-attr-methods=__init__, __new__, setUp, + asyncSetUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit # List of valid names for the first argument in a class method. 
valid-classmethod-first-arg=cls @@ -666,6 +674,8 @@ disable=raw-checker-failed, suppressed-message, useless-suppression, deprecated-pragma, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, use-symbolic-message-instead, trailing-whitespace, line-too-long, @@ -691,13 +701,15 @@ disable=raw-checker-failed, too-many-boolean-expressions, no-else-raise, bare-except, - broad-exception-caught + broad-exception-caught, + fixme, + consider-using-from-import # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member +enable= [METHOD_ARGS] @@ -743,8 +755,9 @@ evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor # used to format the message information. See doc for all details. msg-template= -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= @@ -778,8 +791,8 @@ min-similarity-lines=4 # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the 'python-enchant' package. +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. spelling-dict= # List of comma separated words that should be considered directives if they diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..c5a99d5 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,29 @@ +[tool.poetry] +name = "dub" +version = "0.0.19" +description = "Python Client SDK Generated by Speakeasy" +authors = ["Speakeasy",] +readme = "README.md" +repository = "https://github.com/dubinc/dub-python.git" +packages = [ + { include = "dub", from = "src" } +] +include = ["py.typed"] + +[tool.poetry.dependencies] +python = "^3.8" +httpx = "^0.27.0" +jsonpath-python = "^1.0.6" +pydantic = "~2.7.4" +python-dateutil = "^2.9.0.post0" +typing-inspect = "^0.9.0" + +[tool.poetry.group.dev.dependencies] +pylint = "==3.2.3" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +pythonpath = ["src"] diff --git a/scripts/publish.sh b/scripts/publish.sh index ed45d8a..6392f41 100755 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -1,9 +1,5 @@ #!/usr/bin/env bash -export TWINE_USERNAME=__token__ -export TWINE_PASSWORD=${PYPI_TOKEN} +export POETRY_PYPI_TOKEN_PYPI=${PYPI_TOKEN} -python -m pip install --upgrade pip -pip install setuptools wheel twine -python setup.py sdist bdist_wheel -twine upload dist/* +poetry publish --build diff --git a/setup.py b/setup.py deleted file mode 100644 index 393ceec..0000000 --- a/setup.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -import setuptools -import re - -try: - with open('README.md', 'r') as fh: - long_description = fh.read() - GITHUB_URL = 'https://github.com/dubinc/dub-python.git' - GITHUB_URL = GITHUB_URL[: -len('.git')] if GITHUB_URL.endswith('.git') else GITHUB_URL - # links on PyPI should have absolute URLs - long_description = re.sub( - r'(\[[^\]]+\]\()((?!https?:)[^\)]+)(\))', - lambda m: m.group(1) + GITHUB_URL + '/blob/master/' + m.group(2) + m.group(3), - long_description, - ) -except FileNotFoundError: - long_description = '' - -setuptools.setup( - name='dub', - version='0.0.18', - author='Speakeasy', - description='Python Client SDK Generated by Speakeasy', - url='https://github.com/dubinc/dub-python.git', - long_description=long_description, - long_description_content_type='text/markdown', - packages=setuptools.find_packages(where='src'), - install_requires=[ - "certifi>=2023.7.22", - "charset-normalizer>=3.2.0", - "dataclasses-json>=0.6.4", - "idna>=3.4", - "jsonpath-python>=1.0.6", - "marshmallow>=3.19.0", - "mypy-extensions>=1.0.0", - "packaging>=23.1", - "python-dateutil>=2.8.2", - "requests>=2.31.0", - "six>=1.16.0", - "typing-inspect>=0.9.0", - "typing_extensions>=4.7.1", - "urllib3>=1.26.18", - ], - extras_require={ - "dev": [ - "pylint==3.1.0", - ], - }, - package_dir={'': 'src'}, - python_requires='>=3.8', - package_data={ - 'dub': ['py.typed'] - }, -) diff --git a/src/dub/_hooks/sdkhooks.py b/src/dub/_hooks/sdkhooks.py index 17750b6..535df20 100644 --- a/src/dub/_hooks/sdkhooks.py +++ b/src/dub/_hooks/sdkhooks.py @@ -1,10 +1,10 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests +import httpx from .types import SDKInitHook, BeforeRequestContext, BeforeRequestHook, AfterSuccessContext, AfterSuccessHook, AfterErrorContext, AfterErrorHook, Hooks from .registration import init_hooks from typing import List, Optional, Tuple - +from dub.httpclient import HttpClient class SDKHooks(Hooks): def __init__(self): @@ -26,12 +26,12 @@ def register_after_success_hook(self, hook: AfterSuccessHook) -> None: def register_after_error_hook(self, hook: AfterErrorHook) -> None: self.after_error_hooks.append(hook) - def sdk_init(self, base_url: str, client: requests.Session) -> Tuple[str, requests.Session]: + def sdk_init(self, base_url: str, client: HttpClient) -> Tuple[str, HttpClient]: for hook in self.sdk_init_hooks: base_url, client = hook.sdk_init(base_url, client) return base_url, client - def before_request(self, hook_ctx: BeforeRequestContext, request: requests.PreparedRequest) -> requests.PreparedRequest: + def before_request(self, hook_ctx: BeforeRequestContext, request: httpx.Request) -> httpx.Request: for hook in self.before_request_hooks: out = hook.before_request(hook_ctx, request) if isinstance(out, Exception): @@ -40,7 +40,7 @@ def before_request(self, hook_ctx: BeforeRequestContext, request: requests.Prepa return request - def after_success(self, hook_ctx: AfterSuccessContext, response: requests.Response) -> requests.Response: + def after_success(self, hook_ctx: AfterSuccessContext, response: httpx.Response) -> httpx.Response: for hook in self.after_success_hooks: out = hook.after_success(hook_ctx, response) if isinstance(out, Exception): @@ -48,7 +48,7 @@ def after_success(self, hook_ctx: AfterSuccessContext, response: requests.Respon response = out return response - def after_error(self, hook_ctx: AfterErrorContext, response: Optional[requests.Response], error: Optional[Exception]) -> Tuple[Optional[requests.Response], 
Optional[Exception]]: + def after_error(self, hook_ctx: AfterErrorContext, response: Optional[httpx.Response], error: Optional[Exception]) -> Tuple[Optional[httpx.Response], Optional[Exception]]: for hook in self.after_error_hooks: result = hook.after_error(hook_ctx, response, error) if isinstance(result, Exception): diff --git a/src/dub/_hooks/types.py b/src/dub/_hooks/types.py index b24c141..6b54347 100644 --- a/src/dub/_hooks/types.py +++ b/src/dub/_hooks/types.py @@ -1,7 +1,9 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http + from abc import ABC, abstractmethod +from dub.httpclient import HttpClient +import httpx from typing import Any, Callable, List, Optional, Tuple, Union @@ -34,25 +36,25 @@ def __init__(self, hook_ctx: HookContext): class SDKInitHook(ABC): @abstractmethod - def sdk_init(self, base_url: str, client: requests_http.Session) -> Tuple[str, requests_http.Session]: + def sdk_init(self, base_url: str, client: HttpClient) -> Tuple[str, HttpClient]: pass class BeforeRequestHook(ABC): @abstractmethod - def before_request(self, hook_ctx: BeforeRequestContext, request: requests_http.PreparedRequest) -> Union[requests_http.PreparedRequest, Exception]: + def before_request(self, hook_ctx: BeforeRequestContext, request: httpx.Request) -> Union[httpx.Request, Exception]: pass class AfterSuccessHook(ABC): @abstractmethod - def after_success(self, hook_ctx: AfterSuccessContext, response: requests_http.Response) -> Union[requests_http.Response, Exception]: + def after_success(self, hook_ctx: AfterSuccessContext, response: httpx.Response) -> Union[httpx.Response, Exception]: pass class AfterErrorHook(ABC): @abstractmethod - def after_error(self, hook_ctx: AfterErrorContext, response: Optional[requests_http.Response], error: Optional[Exception]) -> Union[Tuple[Optional[requests_http.Response], Optional[Exception]], Exception]: + def after_error(self, hook_ctx: AfterErrorContext, response: Optional[httpx.Response], error: Optional[Exception]) -> Union[Tuple[Optional[httpx.Response], Optional[Exception]], Exception]: pass diff --git a/src/dub/analytics.py b/src/dub/analytics.py index d2b167a..b09f13c 100644 --- a/src/dub/analytics.py +++ b/src/dub/analytics.py @@ -1,149 +1,171 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import errors, operations -from typing import Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import Optional, Union -class Analytics: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Analytics(BaseSDK): - def retrieve(self, request: operations.RetrieveAnalyticsRequest) -> operations.RetrieveAnalyticsResponseBody: + def retrieve( + self, *, + request: Optional[Union[operations.RetrieveAnalyticsRequest, operations.RetrieveAnalyticsRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.RetrieveAnalyticsResponseBody: r"""Retrieve analytics for a link, a domain, or the authenticated workspace. + Retrieve analytics for a link, a domain, or the authenticated workspace. The response type depends on the `event` and `type` query parameters. 
+ + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='retrieveAnalytics', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.RetrieveAnalyticsGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, - ) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.RetrieveAnalyticsRequest) - url = utils.generate_url(base_url, '/analytics', request, _globals) + req = self.build_request( + method="GET", + path="/analytics", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.RetrieveAnalyticsGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="retrieveAnalytics", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.RetrieveAnalyticsResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", 
"application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def retrieve_async( + self, *, + request: Optional[Union[operations.RetrieveAnalyticsRequest, operations.RetrieveAnalyticsRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.RetrieveAnalyticsResponseBody: + r"""Retrieve analytics for a link, a domain, or the authenticated workspace. - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - + Retrieve analytics for a link, a domain, or the authenticated workspace. The response type depends on the `event` and `type` query parameters. + + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.RetrieveAnalyticsRequest) + req = self.build_request( + method="GET", + path="/analytics", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.RetrieveAnalyticsGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.RetrieveAnalyticsResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = 
http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) - + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="retrieveAnalytics", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.RetrieveAnalyticsResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + 
raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/basesdk.py b/src/dub/basesdk.py new file mode 100644 index 0000000..75fe6bd --- /dev/null +++ b/src/dub/basesdk.py @@ -0,0 +1,208 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from .sdkconfiguration import SDKConfiguration +from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext +from dub.models import errors +import dub.utils as utils +from dub.utils import RetryConfig, SerializedRequestBody +import httpx +from typing import Callable, List, Optional, Tuple + +class BaseSDK: + sdk_configuration: SDKConfiguration + + def __init__(self, sdk_config: SDKConfiguration) -> None: + self.sdk_configuration = sdk_config + + def get_url(self, base_url, url_variables): + sdk_url, sdk_variables = self.sdk_configuration.get_server_details() + + if base_url is None: + base_url = sdk_url + + if url_variables is None: + url_variables = sdk_variables + + return utils.template_url(base_url, url_variables) + + def build_request( + self, + method, + path, + base_url, + url_variables, + request, + request_body_required, + request_has_path_params, + request_has_query_params, + user_agent_header, + accept_header_value, + _globals=None, + security=None, + get_serialized_body: Optional[ + Callable[[], Optional[SerializedRequestBody]] + ] = None, + url_override: Optional[str] = None, + ) -> httpx.Request: + client = self.sdk_configuration.client + + query_params = {} + + url = url_override + if url is None: + url = utils.generate_url( + self.get_url(base_url, url_variables), + path, + request if request_has_path_params else None, + _globals if request_has_path_params else None, + ) + + query_params = utils.get_query_params( + request if request_has_query_params else None, + _globals if request_has_query_params else None, + ) + + headers = utils.get_headers(request, _globals) + headers["Accept"] = accept_header_value + headers[user_agent_header] = self.sdk_configuration.user_agent + + if security is not None: + if callable(security): + security = security() + + security_headers, security_query_params = utils.get_security(security) + headers = {**headers, **security_headers} + query_params = {**query_params, **security_query_params} + + serialized_request_body = SerializedRequestBody("application/octet-stream") + if get_serialized_body is not None: + rb = get_serialized_body() + if request_body_required and rb is None: + raise ValueError("request body is required") + + if rb is not None: + serialized_request_body = rb + + if ( + serialized_request_body.media_type is not None + and serialized_request_body.media_type + not in ( + "multipart/form-data", + "multipart/mixed", + ) + ): + headers["content-type"] = serialized_request_body.media_type + + return client.build_request( + method, + url, + params=query_params, + content=serialized_request_body.content, + data=serialized_request_body.data, + files=serialized_request_body.files, + headers=headers, + ) 
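The hook interfaces migrated earlier in this patch (src/dub/_hooks/types.py) now operate on httpx.Request and httpx.Response, and BaseSDK.do_request() below drives them around client.send(). A minimal sketch of a custom before-request hook written against the new types follows; it assumes user hooks are wired up via the existing registration module, and the class name and header are illustrative only, not part of the generated SDK:

    # Illustrative sketch only (not part of this patch): a before-request hook
    # against the new httpx-based hook types. "X-Example-Source" is a made-up
    # header used purely for demonstration.
    import httpx

    from dub._hooks.types import BeforeRequestContext, BeforeRequestHook


    class ExampleTaggingHook(BeforeRequestHook):
        def before_request(
            self, hook_ctx: BeforeRequestContext, request: httpx.Request
        ) -> httpx.Request:
            # httpx.Request exposes mutable headers, so a hook can annotate the
            # outgoing request before BaseSDK.do_request() sends it.
            request.headers["X-Example-Source"] = "docs-sketch"
            return request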
+ + def do_request( + self, + hook_ctx, + request, + error_status_codes, + retry_config: Optional[Tuple[RetryConfig, List[str]]] = None, + ) -> httpx.Response: + client = self.sdk_configuration.client + + def do(): + http_res = None + try: + req = self.sdk_configuration.get_hooks().before_request( + BeforeRequestContext(hook_ctx), request + ) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), None, e + ) + if e is not None: + raise e + + if http_res is None: + raise errors.SDKError("No response received") + + if utils.match_status_codes(error_status_codes, http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), http_res, None + ) + if e is not None: + raise e + if result is not None: + http_res = result + else: + raise errors.SDKError("Unexpected error occurred") + + return http_res + + if retry_config is not None: + http_res = utils.retry(do, utils.Retries(retry_config[0], retry_config[1])) + else: + http_res = do() + + if not utils.match_status_codes(error_status_codes, http_res.status_code): + http_res = self.sdk_configuration.get_hooks().after_success( + AfterSuccessContext(hook_ctx), http_res + ) + + return http_res + + async def do_request_async( + self, + hook_ctx, + request, + error_status_codes, + retry_config: Optional[Tuple[RetryConfig, List[str]]] = None, + ) -> httpx.Response: + client = self.sdk_configuration.async_client + + async def do(): + http_res = None + try: + req = self.sdk_configuration.get_hooks().before_request( + BeforeRequestContext(hook_ctx), request + ) + http_res = await client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), None, e + ) + if e is not None: + raise e + + if http_res is None: + raise errors.SDKError("No response received") + + if utils.match_status_codes(error_status_codes, http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), http_res, None + ) + if e is not None: + raise e + if result is not None: + http_res = result + else: + raise errors.SDKError("Unexpected error occurred") + + return http_res + + if retry_config is not None: + http_res = await utils.retry_async( + do, utils.Retries(retry_config[0], retry_config[1]) + ) + else: + http_res = await do() + + if not utils.match_status_codes(error_status_codes, http_res.status_code): + http_res = self.sdk_configuration.get_hooks().after_success( + AfterSuccessContext(hook_ctx), http_res + ) + + return http_res diff --git a/src/dub/domains.py b/src/dub/domains.py index 6231b64..7961e4e 100644 --- a/src/dub/domains.py +++ b/src/dub/domains.py @@ -1,560 +1,661 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import components, errors, operations -from typing import List, Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import List, Optional, Union -class Domains: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Domains(BaseSDK): - def list(self, request: operations.ListDomainsRequest) -> List[components.DomainSchema]: + def list( + self, *, + server_url: Optional[str] = None, + ) -> List[components.DomainSchema]: r"""Retrieve a list of domains + Retrieve a list of domains associated with the authenticated workspace. + + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='listDomains', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.ListDomainsGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.ListDomainsRequest( ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/domains', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + req = self.build_request( + method="GET", + path="/domains", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.ListDomainsGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="listDomains", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.DomainSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = 
utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def list_async( + self, *, + server_url: Optional[str] = None, + ) -> List[components.DomainSchema]: + r"""Retrieve a list of domains - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[List[components.DomainSchema]]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif 
http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Retrieve a list of domains associated with the authenticated workspace. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.ListDomainsRequest( + ) + + req = self.build_request( + method="GET", + path="/domains", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.ListDomainsGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="listDomains", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.DomainSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def create(self, request: Optional[operations.CreateDomainRequestBody] = None) -> components.DomainSchema: + def create( + self, *, + request: Optional[Union[operations.CreateDomainRequestBody, operations.CreateDomainRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.DomainSchema: r"""Create a domain + Create a domain for the authenticated workspace. + + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='createDomain', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.CreateDomainGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.CreateDomainRequestBody) + + req = self.build_request( + method="POST", + path="/domains", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.CreateDomainGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.CreateDomainRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="createDomain", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/domains', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.CreateDomainRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "201", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.DomainSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise 
errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def create_async( + self, *, + request: Optional[Union[operations.CreateDomainRequestBody, operations.CreateDomainRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.DomainSchema: + r"""Create a domain - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 201: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.DomainSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise 
errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Create a domain for the authenticated workspace. + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.CreateDomainRequestBody) + + req = self.build_request( + method="POST", + path="/domains", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.CreateDomainGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.CreateDomainRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="createDomain", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "201", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.DomainSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def delete(self, slug: str) -> operations.DeleteDomainResponseBody: + def delete( + self, *, + slug: str, + server_url: Optional[str] = None, + ) -> operations.DeleteDomainResponseBody: r"""Delete a domain + Delete a domain from a workspace. It cannot be undone. 
This will also delete all the links associated with the domain. + + :param slug: The domain name. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='deleteDomain', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + request = operations.DeleteDomainRequest( slug=slug, ) - _globals = operations.DeleteDomainGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + req = self.build_request( + method="DELETE", + path="/domains/{slug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.DeleteDomainGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/domains/{slug}', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('DELETE', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + http_res = self.do_request( + hook_ctx=HookContext(operation_id="deleteDomain", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.DeleteDomainResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", 
"application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def delete_async( + self, *, + slug: str, + server_url: Optional[str] = None, + ) -> operations.DeleteDomainResponseBody: + r"""Delete a domain - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.DeleteDomainResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, 
http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Delete a domain from a workspace. It cannot be undone. This will also delete all the links associated with the domain. + :param slug: The domain name. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.DeleteDomainRequest( + slug=slug, + ) + + req = self.build_request( + method="DELETE", + path="/domains/{slug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.DeleteDomainGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="deleteDomain", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.DeleteDomainResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def update(self, slug: str, request_body: Optional[operations.UpdateDomainRequestBody] = None) -> components.DomainSchema: + def update( + self, *, + slug: str, + request_body: Optional[Union[operations.UpdateDomainRequestBody, operations.UpdateDomainRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.DomainSchema: r"""Update a domain + Update a domain for the authenticated workspace. + + :param slug: The domain name. 
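The async twin delete_async shares the same signature and the same typed-error behaviour, so a 404 surfaces as errors.NotFound rather than a generic SDKError. A small asyncio sketch under the same assumptions about client construction:

    import asyncio

    import dub
    from dub.models import errors

    d = dub.Dub(token="DUB_API_KEY")  # constructor args assumed

    async def remove_domain(slug: str) -> None:
        try:
            res = await d.domains.delete_async(slug=slug)
            print(res)
        except errors.NotFound:
            print(f"{slug} was not found in this workspace")

    asyncio.run(remove_domain("acme.link"))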
+ :param request_body: + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='updateDomain', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + request = operations.UpdateDomainRequest( slug=slug, - request_body=request_body, + request_body=utils.unmarshal(request_body, operations.UpdateDomainRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + + req = self.build_request( + method="PATCH", + path="/domains/{slug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpdateDomainGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateDomainRequestBody]), ) - _globals = operations.UpdateDomainGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + http_res = self.do_request( + hook_ctx=HookContext(operation_id="updateDomain", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/domains/{slug}', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, operations.UpdateDomainRequest, "request_body", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('PATCH', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.DomainSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise 
errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def update_async( + self, *, + slug: str, + request_body: Optional[Union[operations.UpdateDomainRequestBody, operations.UpdateDomainRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.DomainSchema: + r"""Update a domain - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.DomainSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or 
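update now accepts request_body as either the UpdateDomainRequestBody model or its TypedDict form; the isinstance(request_body, BaseModel) check above unmarshals plain dicts into the model before serialization. A sketch of both call styles; the client constructor and the slug field inside the body are assumptions used for illustration.

    import dub
    from dub.models import operations

    d = dub.Dub(token="DUB_API_KEY")  # constructor args assumed

    # Plain dict matching UpdateDomainRequestBodyTypedDict; unmarshalled internally.
    domain = d.domains.update(
        slug="acme.link",
        request_body={"slug": "acme.to"},  # body field name assumed
    )

    # Equivalent call using the model class directly.
    domain = d.domains.update(
        slug="acme.link",
        request_body=operations.UpdateDomainRequestBody(slug="acme.to"),
    )
    print(domain)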
'', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Update a domain for the authenticated workspace. + :param slug: The domain name. 
+ :param request_body: + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.UpdateDomainRequest( + slug=slug, + request_body=utils.unmarshal(request_body, operations.UpdateDomainRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + + req = self.build_request( + method="PATCH", + path="/domains/{slug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpdateDomainGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateDomainRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="updateDomain", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.DomainSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/httpclient.py b/src/dub/httpclient.py new file mode 100644 index 0000000..985e31c --- /dev/null +++ b/src/dub/httpclient.py @@ 
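Every rewritten method now follows the same shape: build_request assembles the request (globals, security, serialized body), do_request / do_request_async sends it through the hook pipeline, and the response is dispatched with utils.match_response on status code plus content type. The standalone sketch below imitates that dispatch with plain httpx to show the idea; it is not the SDK's utils module, and the base URL and auth header are illustrative.

    # Minimal imitation of the status-code/content-type dispatch used above.
    import fnmatch
    import httpx

    def match_response(res: httpx.Response, code: str, content_type: str) -> bool:
        # "4XX" style patterns become "4??" glob patterns over the status code.
        pattern = code.replace("X", "?").replace("x", "?")
        code_ok = fnmatch.fnmatch(str(res.status_code), pattern)
        ctype = res.headers.get("Content-Type", "").split(";")[0].strip()
        type_ok = content_type == "*" or ctype == content_type
        return code_ok and type_ok

    with httpx.Client(base_url="https://api.dub.co") as client:
        res = client.get("/links", headers={"Authorization": "Bearer DUB_API_KEY"})
        if match_response(res, "200", "application/json"):
            print(res.json())
        elif match_response(res, "4XX", "*"):
            raise RuntimeError(f"API error occurred: {res.status_code}")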
-0,0 +1,78 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +# pyright: reportReturnType = false +from typing_extensions import Protocol, runtime_checkable +import httpx +from typing import Any, Optional, Union + + +@runtime_checkable +class HttpClient(Protocol): + def send( + self, + request: httpx.Request, + *, + stream: bool = False, + auth: Union[ + httpx._types.AuthTypes, httpx._client.UseClientDefault, None + ] = httpx.USE_CLIENT_DEFAULT, + follow_redirects: Union[ + bool, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + ) -> httpx.Response: + pass + + def build_request( + self, + method: str, + url: httpx._types.URLTypes, + *, + content: Optional[httpx._types.RequestContent] = None, + data: Optional[httpx._types.RequestData] = None, + files: Optional[httpx._types.RequestFiles] = None, + json: Optional[Any] = None, + params: Optional[httpx._types.QueryParamTypes] = None, + headers: Optional[httpx._types.HeaderTypes] = None, + cookies: Optional[httpx._types.CookieTypes] = None, + timeout: Union[ + httpx._types.TimeoutTypes, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + extensions: Optional[httpx._types.RequestExtensions] = None, + ) -> httpx.Request: + pass + + +@runtime_checkable +class AsyncHttpClient(Protocol): + async def send( + self, + request: httpx.Request, + *, + stream: bool = False, + auth: Union[ + httpx._types.AuthTypes, httpx._client.UseClientDefault, None + ] = httpx.USE_CLIENT_DEFAULT, + follow_redirects: Union[ + bool, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + ) -> httpx.Response: + pass + + def build_request( + self, + method: str, + url: httpx._types.URLTypes, + *, + content: Optional[httpx._types.RequestContent] = None, + data: Optional[httpx._types.RequestData] = None, + files: Optional[httpx._types.RequestFiles] = None, + json: Optional[Any] = None, + params: Optional[httpx._types.QueryParamTypes] = None, + headers: Optional[httpx._types.HeaderTypes] = None, + cookies: Optional[httpx._types.CookieTypes] = None, + timeout: Union[ + httpx._types.TimeoutTypes, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + extensions: Optional[httpx._types.RequestExtensions] = None, + ) -> httpx.Request: + pass diff --git a/src/dub/links.py b/src/dub/links.py index 57c1fbd..d9d6f15 100644 --- a/src/dub/links.py +++ b/src/dub/links.py @@ -1,1223 +1,1465 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import components, errors, operations -from typing import List, Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import List, Optional, Union -class Links: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Links(BaseSDK): - def list(self, request: operations.GetLinksRequest) -> List[components.LinkSchema]: + def list( + self, *, + request: Optional[Union[operations.GetLinksRequest, operations.GetLinksRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> List[components.LinkSchema]: r"""Retrieve a list of links + Retrieve a paginated list of links for the authenticated workspace. + + :param request: The request object to send. 
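The new httpclient.py module defines HttpClient and AsyncHttpClient as runtime-checkable structural protocols, so any object exposing httpx-compatible send() and build_request() methods qualifies; httpx.Client and httpx.AsyncClient satisfy them as-is. The sketch below checks a pre-configured httpx client against the protocol and hands it to the SDK; the client= constructor parameter is an assumption from typical Speakeasy-generated SDKs, not something visible in this file.

    import httpx

    import dub
    from dub.httpclient import HttpClient

    custom_client = httpx.Client(
        timeout=httpx.Timeout(10.0, connect=5.0),
        headers={"x-custom-header": "value"},
    )

    # runtime_checkable protocols allow a structural isinstance() check.
    assert isinstance(custom_client, HttpClient)

    # Handing it to the SDK is assumed to look like this (parameter name unverified).
    d = dub.Dub(token="DUB_API_KEY", client=custom_client)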
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getLinks', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.GetLinksGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetLinksRequest) + + req = self.build_request( + method="GET", + path="/links", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetLinksGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getLinks", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + 
raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def list_async( + self, *, + request: Optional[Union[operations.GetLinksRequest, operations.GetLinksRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> List[components.LinkSchema]: + r"""Retrieve a list of links - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - 
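links.list still returns List[components.LinkSchema], but the request parameter is now optional and may be passed as the GetLinksRequest model or its TypedDict form. A hedged sketch; the search filter and the id/url fields on LinkSchema are assumptions used for illustration, only the method signature and return type come from this hunk.

    import dub
    from dub.models import operations

    d = dub.Dub(token="DUB_API_KEY")  # constructor args assumed

    links = d.links.list(
        request=operations.GetLinksRequest(search="onboarding"),  # filter name assumed
    )
    for link in links:
        print(link.id, link.url)  # LinkSchema field names assumed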
elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Retrieve a paginated list of links for the authenticated workspace. + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetLinksRequest) + + req = self.build_request( + method="GET", + path="/links", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetLinksGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getLinks", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def create(self, request: Optional[operations.CreateLinkRequestBody] = None) -> components.LinkSchema: + def create( + self, *, + request: Optional[Union[operations.CreateLinkRequestBody, operations.CreateLinkRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: r"""Create a new link + Create a new link for the authenticated workspace. + + :param request: The request object to send. 
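Because list_async mirrors the sync signature, several workspace queries can be awaited concurrently. A small asyncio.gather sketch under the same assumptions about client construction and filter names:

    import asyncio

    import dub
    from dub.models import operations

    d = dub.Dub(token="DUB_API_KEY")  # constructor args assumed

    async def main() -> None:
        docs_links, blog_links = await asyncio.gather(
            d.links.list_async(request=operations.GetLinksRequest(search="docs")),
            d.links.list_async(request=operations.GetLinksRequest(search="blog")),
        )
        print(len(docs_links), len(blog_links))

    asyncio.run(main())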
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='createLink', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.CreateLinkGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.CreateLinkRequestBody) + + req = self.build_request( + method="POST", + path="/links", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.CreateLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.CreateLinkRequestBody]), ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.CreateLinkRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + http_res = self.do_request( + hook_ctx=HookContext(operation_id="createLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise 
errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def create_async( + self, *, + request: Optional[Union[operations.CreateLinkRequestBody, operations.CreateLinkRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: + r"""Create a new link - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown 
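links.create serializes the optional CreateLinkRequestBody as JSON through get_serialized_body and raises errors.UnprocessableEntity when the API rejects the payload with a 422. A hedged sketch; the url field on the body and the short_link field on the returned LinkSchema are assumptions.

    import dub
    from dub.models import errors, operations

    d = dub.Dub(token="DUB_API_KEY")  # constructor args assumed

    try:
        link = d.links.create(
            request=operations.CreateLinkRequestBody(
                url="https://acme.com/launch",  # destination URL; field name assumed
            ),
        )
        print(link.short_link)  # field name assumed
    except errors.UnprocessableEntity as e:
        print("validation failed:", e)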
content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Create a new link for the authenticated workspace. + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.CreateLinkRequestBody) + + req = self.build_request( + method="POST", + path="/links", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.CreateLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.CreateLinkRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="createLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def count(self, request: operations.GetLinksCountRequest) -> float: + def count( + self, *, + request: Optional[Union[operations.GetLinksCountRequest, operations.GetLinksCountRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> float: r"""Retrieve links count + Retrieve the number 
of links for the authenticated workspace. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getLinksCount', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.GetLinksCountGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetLinksCountRequest) + + req = self.build_request( + method="GET", + path="/links/count", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetLinksCountGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links/count', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getLinksCount", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[float]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, 
"422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def count_async( + self, *, + request: Optional[Union[operations.GetLinksCountRequest, operations.GetLinksCountRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> float: + r"""Retrieve links count - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[float]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: 
{content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Retrieve the number of links for the authenticated workspace. + :param request: The request object to send. 
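# A rough usage sketch for the regenerated count/count_async methods (assumptions: the
# client is constructed as in the README, e.g. `s = Dub(token="DUB_API_KEY", workspace_id="ws_...")`,
# and this sub-SDK is reachable as `s.links`; the request argument is optional):
total = s.links.count()                  # synchronous call
total = await s.links.count_async()      # from within an async function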
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetLinksCountRequest) + + req = self.build_request( + method="GET", + path="/links/count", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetLinksCountGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getLinksCount", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[float]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def get(self, request: operations.GetLinkInfoRequest) -> components.LinkSchema: + def get( + self, *, + request: Optional[Union[operations.GetLinkInfoRequest, operations.GetLinkInfoRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: r"""Retrieve a link + Retrieve the info for a link. + + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getLinkInfo', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.GetLinkInfoGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetLinkInfoRequest) + + req = self.build_request( + method="GET", + path="/links/info", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetLinkInfoGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getLinkInfo", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links/info', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, 
errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def get_async( + self, *, + request: Optional[Union[operations.GetLinkInfoRequest, operations.GetLinkInfoRequestTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: + r"""Retrieve a link - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, 
http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Retrieve the info for a link. + :param request: The request object to send. 
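# A rough usage sketch for get/get_async, with `s` as in the earlier sketch; the request may be
# passed as a GetLinkInfoRequest model or a plain dict (per the Union/TypedDict signature above).
# The `link_id` field name is an assumption based on the /links/info query parameters.
link = s.links.get(request={"link_id": "link_xxx"})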
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetLinkInfoRequest) + + req = self.build_request( + method="GET", + path="/links/info", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetLinkInfoGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getLinkInfo", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def delete(self, link_id: str) -> operations.DeleteLinkResponseBody: + def delete( + self, *, + link_id: str, + server_url: Optional[str] = None, + ) -> operations.DeleteLinkResponseBody: r"""Delete a link + Delete a link for the authenticated workspace. + + :param link_id: The id of the link to delete. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`. 
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='deleteLink', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + request = operations.DeleteLinkRequest( link_id=link_id, ) - _globals = operations.DeleteLinkGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + req = self.build_request( + method="DELETE", + path="/links/{linkId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.DeleteLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="deleteLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links/{linkId}', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('DELETE', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.DeleteLinkResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise 
errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def delete_async( + self, *, + link_id: str, + server_url: Optional[str] = None, + ) -> operations.DeleteLinkResponseBody: + r"""Delete a link - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.DeleteLinkResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if 
utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Delete a link for the authenticated workspace. + :param link_id: The id of the link to delete. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`. 
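# A rough usage sketch for delete/delete_async, with `s` as in the earlier sketch; per the
# docstring, `link_id` may also be an externalId prefixed with `ext_`.
res = s.links.delete(link_id="link_xxx")
res = await s.links.delete_async(link_id="ext_my-external-id")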
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.DeleteLinkRequest( + link_id=link_id, + ) + + req = self.build_request( + method="DELETE", + path="/links/{linkId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.DeleteLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="deleteLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.DeleteLinkResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def update(self, link_id: str, request_body: Optional[operations.UpdateLinkRequestBody] = None) -> components.LinkSchema: + def update( + self, *, + link_id: str, + request_body: Optional[Union[operations.UpdateLinkRequestBody, operations.UpdateLinkRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: r"""Update a link + Update a link for the authenticated workspace. If there's no change, returns it as it is. 
+ + :param link_id: The id of the link to update. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`. + :param request_body: + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='updateLink', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + request = operations.UpdateLinkRequest( link_id=link_id, - request_body=request_body, + request_body=utils.unmarshal(request_body, operations.UpdateLinkRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, ) - _globals = operations.UpdateLinkGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + req = self.build_request( + method="PATCH", + path="/links/{linkId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpdateLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateLinkRequestBody]), ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links/{linkId}', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, operations.UpdateLinkRequest, "request_body", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('PATCH', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + http_res = self.do_request( + hook_ctx=HookContext(operation_id="updateLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if 
utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def update_async( + self, *, + link_id: str, + request_body: Optional[Union[operations.UpdateLinkRequestBody, operations.UpdateLinkRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: + r"""Update a link - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif 
http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Update a link for the authenticated workspace. If there's no change, returns it as it is. + :param link_id: The id of the link to update. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`. 
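# A rough usage sketch for update/update_async, with `s` as in the earlier sketch; `request_body`
# accepts either an UpdateLinkRequestBody model or a plain dict (TypedDict). The `url` field
# shown here is an assumption based on the request body documentation.
link = s.links.update(link_id="link_xxx", request_body={"url": "https://example.com"})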
+ :param request_body: + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.UpdateLinkRequest( + link_id=link_id, + request_body=utils.unmarshal(request_body, operations.UpdateLinkRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + + req = self.build_request( + method="PATCH", + path="/links/{linkId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpdateLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateLinkRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="updateLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def create_many(self, request: Optional[List[operations.RequestBody]] = None) -> List[components.LinkSchema]: + def create_many( + self, *, + request: 
Optional[Union[List[operations.RequestBody], List[operations.RequestBodyTypedDict]]] = None, + server_url: Optional[str] = None, + ) -> List[components.LinkSchema]: r"""Bulk create links + Bulk create up to 100 links for the authenticated workspace. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='bulkCreateLinks', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.BulkCreateLinksGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, List[operations.RequestBody]) + + req = self.build_request( + method="POST", + path="/links/bulk", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.BulkCreateLinksGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[List[operations.RequestBody]]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="bulkCreateLinks", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links/bulk', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[List[operations.RequestBody]], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if 
utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def create_many_async( + self, *, + request: Optional[Union[List[operations.RequestBody], List[operations.RequestBodyTypedDict]]] = None, + server_url: Optional[str] = None, + ) -> List[components.LinkSchema]: + r"""Bulk create links - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif 
http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Bulk create up to 100 links for the authenticated workspace. + :param request: The request object to send. 
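Reviewer note: a minimal usage sketch for the new bulk-create surface. Only `links.create_many` and the typed error classes are taken from this patch; the `Dub` client constructor, its `token`/`workspace_id` keyword arguments, and the `url` payload field are assumptions and may differ from the generated docs.

# Minimal sketch (assumed: Dub constructor kwargs, `url` field name).
from dub import Dub
from dub.models import errors

s = Dub(token="DUB_API_KEY", workspace_id="ws_xxx")  # assumed constructor

try:
    # Plain dicts are accepted because the new signature also takes
    # List[operations.RequestBodyTypedDict].
    created = s.links.create_many(request=[
        {"url": "https://example.com/one"},  # assumed field name
        {"url": "https://example.com/two"},
    ])
    for link in created:
        print(link)  # components.LinkSchema
except errors.BadRequest as e:
    # 400 responses now surface as a dedicated exception type.
    print("bad request:", e)
except errors.RateLimitExceeded as e:
    # 429 responses likewise map to their own exception (see the
    # error_status_codes handling above).
    print("rate limited:", e)
except errors.SDKError as e:
    # Catch-all for other 4XX/5XX responses.
    print("API error:", e)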
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, List[operations.RequestBody]) + + req = self.build_request( + method="POST", + path="/links/bulk", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.BulkCreateLinksGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[List[operations.RequestBody]]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="bulkCreateLinks", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def update_many(self, request: Optional[operations.BulkUpdateLinksRequestBody] = None) -> List[components.LinkSchema]: + def update_many( + self, *, + request: Optional[Union[operations.BulkUpdateLinksRequestBody, operations.BulkUpdateLinksRequestBodyTypedDict]] = None, + server_url: 
Optional[str] = None, + ) -> List[components.LinkSchema]: r"""Bulk update links + Bulk update up to 100 links with the same data for the authenticated workspace. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='bulkUpdateLinks', oauth2_scopes=[], security_source=self.sdk_configuration.security) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = base_url + '/links/bulk' - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.BulkUpdateLinksRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('PATCH', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.BulkUpdateLinksRequestBody) + + req = self.build_request( + method="PATCH", + path="/links/bulk", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.BulkUpdateLinksRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="bulkUpdateLinks", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + 
raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def update_many_async( + self, *, + request: Optional[Union[operations.BulkUpdateLinksRequestBody, operations.BulkUpdateLinksRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> List[components.LinkSchema]: + r"""Bulk update links - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: 
disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Bulk update up to 100 links with the same data for the authenticated workspace. + :param request: The request object to send. 
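Reviewer note: a matching sketch for `update_many`. The method name, keyword-only `request` parameter, and dict/TypedDict acceptance come from this patch; the `link_ids` and `data` field names are assumptions about the BulkUpdateLinksRequestBody shape and should be checked against the generated docs.

# Minimal sketch (assumed: Dub constructor, `link_ids`/`data` field names).
from dub import Dub

s = Dub(token="DUB_API_KEY")  # assumed constructor

updated = s.links.update_many(request={
    "link_ids": ["clux123", "clux456"],  # assumed field name
    "data": {"tag_ids": ["tag_abc"]},    # assumed field names
})
print(f"{len(updated)} links updated")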
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.BulkUpdateLinksRequestBody) + + req = self.build_request( + method="PATCH", + path="/links/bulk", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.BulkUpdateLinksRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="bulkUpdateLinks", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.LinkSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def upsert(self, request: Optional[operations.UpsertLinkRequestBody] = None) -> components.LinkSchema: + def upsert( + self, *, + request: Optional[Union[operations.UpsertLinkRequestBody, operations.UpsertLinkRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: r"""Upsert a link + Upsert a link for the authenticated workspace by 
its URL. If a link with the same URL already exists, return it (or update it if there are any changes). Otherwise, a new link will be created. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='upsertLink', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.UpsertLinkGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.UpsertLinkRequestBody) + + req = self.build_request( + method="PUT", + path="/links/upsert", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpsertLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.UpsertLinkRequestBody]), ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/links/upsert', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.UpsertLinkRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('PUT', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + http_res = self.do_request( + hook_ctx=HookContext(operation_id="upsertLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, 
errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def upsert_async( + self, *, + request: Optional[Union[operations.UpsertLinkRequestBody, operations.UpsertLinkRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.LinkSchema: + r"""Upsert a link - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or 
'', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Upsert a link for the authenticated workspace by its URL. If a link with the same URL already exists, return it (or update it if there are any changes). Otherwise, a new link will be created. + :param request: The request object to send. 
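Reviewer note: a sketch of the upsert call, plus the per-call `server_url` override that every regenerated method now accepts. The override parameter is taken from this patch; the `Dub` constructor and the `url` field name are assumptions.

# Minimal sketch (assumed: Dub constructor, `url` field name).
from dub import Dub

s = Dub(token="DUB_API_KEY")  # assumed constructor

# Idempotent on the destination URL: safe to retry, and returns the existing
# link when one already exists.
link = s.links.upsert(request={"url": "https://example.com/launch"})
print(link)

# server_url overrides the configured base URL for this call only, which is
# handy when pointing the SDK at a mock server in tests.
link = s.links.upsert(
    request={"url": "https://example.com/launch"},
    server_url="http://localhost:8080",
)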
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.UpsertLinkRequestBody) + + req = self.build_request( + method="PUT", + path="/links/upsert", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpsertLinkGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.UpsertLinkRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="upsertLink", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.LinkSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/metatags.py b/src/dub/metatags.py index d6ddd28..14e2b29 100644 --- a/src/dub/metatags.py +++ b/src/dub/metatags.py @@ -1,72 +1,111 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import errors, operations -from typing import Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import Optional, Union -class Metatags: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Metatags(BaseSDK): - def get(self, request: operations.GetMetatagsRequest) -> operations.GetMetatagsResponseBody: + def get( + self, *, + request: Union[operations.GetMetatagsRequest, operations.GetMetatagsRequestTypedDict], + server_url: Optional[str] = None, + ) -> operations.GetMetatagsResponseBody: r"""Retrieve the metatags for a URL + Retrieve the metatags for a URL. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getMetatags', oauth2_scopes=[], security_source=self.sdk_configuration.security) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetMetatagsRequest) - url = base_url + '/metatags' + req = self.build_request( + method="GET", + path="/metatags", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + ) - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getMetatags", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + ) - query_params = { **utils.get_query_params(request), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.GetMetatagsResponseBody]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def get_async( + self, *, + request: Union[operations.GetMetatagsRequest, 
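Reviewer note: a sketch of the metatags call in both its sync and async forms; every regenerated method now has an `*_async` twin built on `do_request_async()`. The `Dub` constructor, the `url` request field, and the response field names are assumptions.

# Minimal sketch (assumed: Dub constructor, `url` request field).
import asyncio
from dub import Dub

s = Dub(token="DUB_API_KEY")  # assumed constructor

tags = s.metatags.get(request={"url": "https://dub.co"})
print(tags)  # GetMetatagsResponseBody; title/description/image are assumed fields

async def main() -> None:
    # Same pipeline, awaited through do_request_async().
    print(await s.metatags.get_async(request={"url": "https://dub.co"}))

asyncio.run(main())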
operations.GetMetatagsRequestTypedDict], + server_url: Optional[str] = None, + ) -> operations.GetMetatagsResponseBody: + r"""Retrieve the metatags for a URL - if utils.match_status_codes(['4XX','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - + Retrieve the metatags for a URL. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetMetatagsRequest) + req = self.build_request( + method="GET", + path="/metatags", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + ) - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.GetMetatagsResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) - + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getMetatags", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.GetMetatagsResponseBody]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/models/components/clicksbrowsers.py b/src/dub/models/components/clicksbrowsers.py index ba81005..b96b1a8 100644 --- a/src/dub/models/components/clicksbrowsers.py +++ b/src/dub/models/components/clicksbrowsers.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksBrowsers: - browser: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('browser') }}) +class ClicksBrowsersTypedDict(TypedDict): + browser: str r"""The name of the browser""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this browser""" +class ClicksBrowsers(BaseModel): + browser: str + r"""The name of the browser""" + clicks: float + r"""The number of clicks from this browser""" + diff --git a/src/dub/models/components/clickscities.py b/src/dub/models/components/clickscities.py index ae7ac09..ced83b0 100644 --- a/src/dub/models/components/clickscities.py +++ b/src/dub/models/components/clickscities.py @@ -1,274 +1,279 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class ClicksCitiesCountry(str, Enum): r"""The 2-letter country code of the city: https://d.to/geo""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 
'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + BB = "BB" + BY = "BY" + BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = "CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = 
"TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = "SS" + XK = "XK" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksCities: - city: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('city') }}) +class ClicksCitiesTypedDict(TypedDict): + city: str r"""The name of the city""" - country: ClicksCitiesCountry = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country') }}) + country: ClicksCitiesCountry r"""The 2-letter country code of the city: https://d.to/geo""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this city""" +class ClicksCities(BaseModel): + city: str + r"""The name of the city""" + country: ClicksCitiesCountry + r"""The 2-letter country code of the city: https://d.to/geo""" + clicks: float + r"""The number of clicks from this city""" + diff --git a/src/dub/models/components/clickscount.py b/src/dub/models/components/clickscount.py index c3f35e5..5a9a615 100644 --- a/src/dub/models/components/clickscount.py +++ b/src/dub/models/components/clickscount.py @@ -1,15 +1,16 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksCount: - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) +class ClicksCountTypedDict(TypedDict): + clicks: float r"""The total number of clicks""" +class ClicksCount(BaseModel): + clicks: float + r"""The total number of clicks""" + diff --git a/src/dub/models/components/clickscountries.py b/src/dub/models/components/clickscountries.py index c7add35..ccefb68 100644 --- a/src/dub/models/components/clickscountries.py +++ b/src/dub/models/components/clickscountries.py @@ -1,272 +1,275 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class Country(str, Enum): r"""The 2-letter country code: https://d.to/geo""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + BB = "BB" + BY = "BY" + 
BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = "CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = "TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = "SS" + XK = "XK" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksCountries: - country: Country = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country') }}) +class ClicksCountriesTypedDict(TypedDict): + country: Country r"""The 2-letter country code: https://d.to/geo""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this country""" +class ClicksCountries(BaseModel): + country: Country + r"""The 2-letter country code: https://d.to/geo""" + clicks: float + r"""The number of clicks from this country""" + diff --git a/src/dub/models/components/clicksdevices.py b/src/dub/models/components/clicksdevices.py index 
f841735..f19cf6c 100644 --- a/src/dub/models/components/clicksdevices.py +++ b/src/dub/models/components/clicksdevices.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksDevices: - device: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('device') }}) +class ClicksDevicesTypedDict(TypedDict): + device: str r"""The name of the device""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this device""" +class ClicksDevices(BaseModel): + device: str + r"""The name of the device""" + clicks: float + r"""The number of clicks from this device""" + diff --git a/src/dub/models/components/clicksos.py b/src/dub/models/components/clicksos.py index 56413a3..cf35c56 100644 --- a/src/dub/models/components/clicksos.py +++ b/src/dub/models/components/clicksos.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksOS: - os: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('os') }}) +class ClicksOSTypedDict(TypedDict): + os: str r"""The name of the OS""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this OS""" +class ClicksOS(BaseModel): + os: str + r"""The name of the OS""" + clicks: float + r"""The number of clicks from this OS""" + diff --git a/src/dub/models/components/clicksreferers.py b/src/dub/models/components/clicksreferers.py index b4905db..875b0f6 100644 --- a/src/dub/models/components/clicksreferers.py +++ b/src/dub/models/components/clicksreferers.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksReferers: - referer: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('referer') }}) +class ClicksReferersTypedDict(TypedDict): + referer: str r"""The name of the referer. If unknown, this will be `(direct)`""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this referer""" +class ClicksReferers(BaseModel): + referer: str + r"""The name of the referer. 
If unknown, this will be `(direct)`""" + clicks: float + r"""The number of clicks from this referer""" + diff --git a/src/dub/models/components/clickstimeseries.py b/src/dub/models/components/clickstimeseries.py index b375acb..2863a8c 100644 --- a/src/dub/models/components/clickstimeseries.py +++ b/src/dub/models/components/clickstimeseries.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksTimeseries: - start: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start') }}) +class ClicksTimeseriesTypedDict(TypedDict): + start: str r"""The starting timestamp of the interval""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks in the interval""" +class ClicksTimeseries(BaseModel): + start: str + r"""The starting timestamp of the interval""" + clicks: float + r"""The number of clicks in the interval""" + diff --git a/src/dub/models/components/clickstoplinks.py b/src/dub/models/components/clickstoplinks.py index f6e1d80..328bf83 100644 --- a/src/dub/models/components/clickstoplinks.py +++ b/src/dub/models/components/clickstoplinks.py @@ -1,32 +1,46 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import pydantic +from typing import TypedDict +from typing_extensions import Annotated -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksTopLinks: - link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('link') }}) - r"""The unique ID of the short link - - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. 
- """ - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class ClicksTopLinksTypedDict(TypedDict): + link: str + r"""The unique ID of the short link""" + id: str r"""The unique ID of the short link""" - domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) + domain: str r"""The domain of the short link""" - key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }}) + key: str r"""The key of the short link""" - short_link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shortLink') }}) + short_link: str r"""The short link URL""" - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) + url: str r"""The destination URL of the short link""" - created_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('createdAt') }}) + created_at: str r"""The creation timestamp of the short link""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this link""" +class ClicksTopLinks(BaseModel): + link: Annotated[str, pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.")] + r"""The unique ID of the short link""" + id: str + r"""The unique ID of the short link""" + domain: str + r"""The domain of the short link""" + key: str + r"""The key of the short link""" + short_link: Annotated[str, pydantic.Field(alias="shortLink")] + r"""The short link URL""" + url: str + r"""The destination URL of the short link""" + created_at: Annotated[str, pydantic.Field(alias="createdAt")] + r"""The creation timestamp of the short link""" + clicks: float + r"""The number of clicks from this link""" + diff --git a/src/dub/models/components/clickstopurls.py b/src/dub/models/components/clickstopurls.py index 0d99503..aedc7ca 100644 --- a/src/dub/models/components/clickstopurls.py +++ b/src/dub/models/components/clickstopurls.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClicksTopUrls: - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class ClicksTopUrlsTypedDict(TypedDict): + url: str r"""The destination URL""" - clicks: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks') }}) + clicks: float r"""The number of clicks from this URL""" +class ClicksTopUrls(BaseModel): + url: str + r"""The destination URL""" + clicks: float + r"""The number of clicks from this URL""" + diff --git a/src/dub/models/components/countrycode.py b/src/dub/models/components/countrycode.py index 6ecb1de..cc0fb23 100644 --- a/src/dub/models/components/countrycode.py +++ b/src/dub/models/components/countrycode.py @@ -6,253 +6,254 @@ class CountryCode(str, Enum): r"""The country to retrieve analytics for.""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 
'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + BB = "BB" + BY = "BY" + BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = "CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = "TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = 
"SS" + XK = "XK" + diff --git a/src/dub/models/components/domainschema.py b/src/dub/models/components/domainschema.py index 309033f..ccb3e43 100644 --- a/src/dub/models/components/domainschema.py +++ b/src/dub/models/components/domainschema.py @@ -1,32 +1,76 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel, Nullable +import pydantic +from pydantic import model_serializer +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DomainSchema: - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class DomainSchemaTypedDict(TypedDict): + id: str r"""The unique identifier of the domain.""" - slug: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slug') }}) + slug: str r"""The domain name.""" - expired_url: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl') }}) + expired_url: Nullable[str] r"""The URL to redirect to when a link under this domain has expired.""" - created_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('createdAt') }}) + created_at: str r"""The date the domain was created.""" - updated_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('updatedAt') }}) + updated_at: str r"""The date the domain was last updated.""" - verified: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('verified'), 'exclude': lambda f: f is None }}) + verified: NotRequired[bool] r"""Whether the domain is verified.""" - primary: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary'), 'exclude': lambda f: f is None }}) + primary: NotRequired[bool] r"""Whether the domain is the primary domain for the workspace.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: NotRequired[bool] r"""Whether the domain is archived.""" - placeholder: Optional[str] = dataclasses.field(default='https://dub.co/help/article/what-is-dub', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('placeholder'), 'exclude': lambda f: f is None }}) + placeholder: NotRequired[str] r"""Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened.""" +class DomainSchema(BaseModel): + id: str + r"""The unique identifier of the domain.""" + slug: str + r"""The domain name.""" + expired_url: Annotated[Nullable[str], pydantic.Field(alias="expiredUrl")] + r"""The URL to redirect to when a link under this domain has expired.""" + created_at: Annotated[str, pydantic.Field(alias="createdAt")] + r"""The date the domain was created.""" + updated_at: Annotated[str, pydantic.Field(alias="updatedAt")] + r"""The date the domain was last updated.""" + verified: Optional[bool] = False + r"""Whether the domain is verified.""" + primary: Optional[bool] = False + r"""Whether the domain is the primary domain for the workspace.""" + 
archived: Optional[bool] = False + r"""Whether the domain is archived.""" + placeholder: Optional[str] = "https://dub.co/help/article/what-is-dub" + r"""Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened.""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["verified", "primary", "archived", "placeholder"] + nullable_fields = ["expiredUrl"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + + return m + diff --git a/src/dub/models/components/leadsbrowsers.py b/src/dub/models/components/leadsbrowsers.py index 6b9ed06..4f421b8 100644 --- a/src/dub/models/components/leadsbrowsers.py +++ b/src/dub/models/components/leadsbrowsers.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsBrowsers: - browser: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('browser') }}) +class LeadsBrowsersTypedDict(TypedDict): + browser: str r"""The name of the browser""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this browser""" +class LeadsBrowsers(BaseModel): + browser: str + r"""The name of the browser""" + leads: float + r"""The number of leads from this browser""" + diff --git a/src/dub/models/components/leadscities.py b/src/dub/models/components/leadscities.py index 05a0d05..f0efee7 100644 --- a/src/dub/models/components/leadscities.py +++ b/src/dub/models/components/leadscities.py @@ -1,274 +1,279 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
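The wrap-mode serializer added to DomainSchema above distinguishes required-but-nullable fields (kept as an explicit null) from optional fields (dropped when set to None). A rough sketch of that behaviour, assuming the model is dumped with by_alias=True (the expected output is an assumption, not verified against the SDK):

    from dub.models.components.domainschema import DomainSchema

    domain = DomainSchema.model_validate({
        "id": "dom_123",
        "slug": "acme.link",
        "expiredUrl": None,                      # required but nullable
        "createdAt": "2024-07-08T17:41:06.000Z",
        "updatedAt": "2024-07-08T17:41:06.000Z",
        "placeholder": None,                     # optional and not nullable
    })
    print(domain.model_dump(by_alias=True))
    # Expected shape (assumption): "expiredUrl" survives as an explicit null, "placeholder"
    # is omitted, and the unset verified/primary/archived flags keep their False defaults.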
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class LeadsCitiesCountry(str, Enum): r"""The 2-letter country code of the city: https://d.to/geo""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + 
BB = "BB" + BY = "BY" + BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = "CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = "TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = "SS" + XK = "XK" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsCities: - city: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('city') }}) +class LeadsCitiesTypedDict(TypedDict): + city: str r"""The name of the city""" - country: LeadsCitiesCountry = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country') }}) + country: LeadsCitiesCountry r"""The 2-letter country code of the city: https://d.to/geo""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this city""" +class LeadsCities(BaseModel): + city: str + r"""The name of the city""" + country: 
LeadsCitiesCountry + r"""The 2-letter country code of the city: https://d.to/geo""" + leads: float + r"""The number of leads from this city""" + diff --git a/src/dub/models/components/leadscount.py b/src/dub/models/components/leadscount.py index 0aaac6c..8c16af3 100644 --- a/src/dub/models/components/leadscount.py +++ b/src/dub/models/components/leadscount.py @@ -1,15 +1,16 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsCount: - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) +class LeadsCountTypedDict(TypedDict): + leads: float r"""The total number of leads""" +class LeadsCount(BaseModel): + leads: float + r"""The total number of leads""" + diff --git a/src/dub/models/components/leadscountries.py b/src/dub/models/components/leadscountries.py index accafc8..5423b94 100644 --- a/src/dub/models/components/leadscountries.py +++ b/src/dub/models/components/leadscountries.py @@ -1,272 +1,275 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class LeadsCountriesCountry(str, Enum): r"""The 2-letter country code: https://d.to/geo""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 
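The same pairing appears throughout the leads analytics components: the TypedDict classes give static type checkers a dict-shaped view of each payload, while the BaseModel handles runtime validation. A small, hedged sketch using the LeadsCities pair above (illustrative values only):

    from dub.models.components.leadscities import (
        LeadsCities,
        LeadsCitiesCountry,
        LeadsCitiesTypedDict,
    )

    # A dict-shaped row that type checkers can verify against the TypedDict...
    row: LeadsCitiesTypedDict = {
        "city": "Berlin",
        "country": LeadsCitiesCountry.DE,
        "leads": 3.0,
    }
    # ...and the pydantic model that validates it at runtime.
    parsed = LeadsCities.model_validate(row)
    print(parsed.city, parsed.country.value, parsed.leads)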
'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + BB = "BB" + BY = "BY" + BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = 
"CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = "TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = "SS" + XK = "XK" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsCountries: - country: LeadsCountriesCountry = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country') }}) +class LeadsCountriesTypedDict(TypedDict): + country: LeadsCountriesCountry r"""The 2-letter country code: https://d.to/geo""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this country""" +class LeadsCountries(BaseModel): + country: LeadsCountriesCountry + r"""The 2-letter country code: https://d.to/geo""" + leads: float + r"""The number of leads from this country""" + diff --git a/src/dub/models/components/leadsdevices.py b/src/dub/models/components/leadsdevices.py index 68fd2f3..fa67d09 100644 --- a/src/dub/models/components/leadsdevices.py +++ b/src/dub/models/components/leadsdevices.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsDevices: - device: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('device') }}) +class LeadsDevicesTypedDict(TypedDict): + device: str r"""The name of the device""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this device""" +class LeadsDevices(BaseModel): + device: str + r"""The name of the device""" + leads: float + r"""The number of leads from this device""" + diff --git a/src/dub/models/components/leadsos.py b/src/dub/models/components/leadsos.py index 8a70652..70d5d7a 100644 --- a/src/dub/models/components/leadsos.py +++ b/src/dub/models/components/leadsos.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsOS: - os: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('os') }}) +class LeadsOSTypedDict(TypedDict): + os: str r"""The name of the OS""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this OS""" +class LeadsOS(BaseModel): + os: str + r"""The name of the OS""" + leads: float + r"""The number of leads from this OS""" + diff --git a/src/dub/models/components/leadsreferers.py b/src/dub/models/components/leadsreferers.py index bdcc4e7..813ce34 100644 --- a/src/dub/models/components/leadsreferers.py +++ b/src/dub/models/components/leadsreferers.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsReferers: - referer: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('referer') }}) +class LeadsReferersTypedDict(TypedDict): + referer: str r"""The name of the referer. If unknown, this will be `(direct)`""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this referer""" +class LeadsReferers(BaseModel): + referer: str + r"""The name of the referer. If unknown, this will be `(direct)`""" + leads: float + r"""The number of leads from this referer""" + diff --git a/src/dub/models/components/leadstimeseries.py b/src/dub/models/components/leadstimeseries.py index a47698b..b47a055 100644 --- a/src/dub/models/components/leadstimeseries.py +++ b/src/dub/models/components/leadstimeseries.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsTimeseries: - start: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start') }}) +class LeadsTimeseriesTypedDict(TypedDict): + start: str r"""The starting timestamp of the interval""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads in the interval""" +class LeadsTimeseries(BaseModel): + start: str + r"""The starting timestamp of the interval""" + leads: float + r"""The number of leads in the interval""" + diff --git a/src/dub/models/components/leadstoplinks.py b/src/dub/models/components/leadstoplinks.py index b3aa11f..6772d08 100644 --- a/src/dub/models/components/leadstoplinks.py +++ b/src/dub/models/components/leadstoplinks.py @@ -1,32 +1,46 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import pydantic +from typing import TypedDict +from typing_extensions import Annotated -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsTopLinks: - link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('link') }}) - r"""The unique ID of the short link - - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class LeadsTopLinksTypedDict(TypedDict): + link: str + r"""The unique ID of the short link""" + id: str r"""The unique ID of the short link""" - domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) + domain: str r"""The domain of the short link""" - key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }}) + key: str r"""The key of the short link""" - short_link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shortLink') }}) + short_link: str r"""The short link URL""" - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) + url: str r"""The destination URL of the short link""" - created_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('createdAt') }}) + created_at: str r"""The creation timestamp of the short link""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this link""" +class LeadsTopLinks(BaseModel): + link: Annotated[str, pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.")] + r"""The unique ID of the short link""" + id: str + r"""The unique ID of the short link""" + domain: str + r"""The domain of the short link""" + key: str + r"""The key of the short link""" + short_link: Annotated[str, pydantic.Field(alias="shortLink")] + r"""The short link URL""" + url: str + r"""The destination URL of the short link""" + created_at: Annotated[str, pydantic.Field(alias="createdAt")] + r"""The creation timestamp of the short link""" + leads: float + r"""The number of leads from this link""" + diff --git a/src/dub/models/components/leadstopurls.py b/src/dub/models/components/leadstopurls.py index 3387a1b..26d3495 100644 --- a/src/dub/models/components/leadstopurls.py +++ b/src/dub/models/components/leadstopurls.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LeadsTopUrls: - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class LeadsTopUrlsTypedDict(TypedDict): + url: str r"""The destination URL""" - leads: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads') }}) + leads: float r"""The number of leads from this URL""" +class LeadsTopUrls(BaseModel): + url: str + r"""The destination URL""" + leads: float + r"""The number of leads from this URL""" + diff --git a/src/dub/models/components/linkgeotargeting.py b/src/dub/models/components/linkgeotargeting.py index c03c446..b4a3167 100644 --- a/src/dub/models/components/linkgeotargeting.py +++ b/src/dub/models/components/linkgeotargeting.py @@ -1,265 +1,518 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LinkGeoTargeting: +class LinkGeoTargetingTypedDict(TypedDict): r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" - af: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AF'), 'exclude': lambda f: f is None }}) - al: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AL'), 'exclude': lambda f: f is None }}) - dz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DZ'), 'exclude': lambda f: f is None }}) - as_: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AS'), 'exclude': lambda f: f is None }}) - ad: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AD'), 'exclude': lambda f: f is None }}) - ao: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AO'), 'exclude': lambda f: f is None }}) - ai: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AI'), 'exclude': lambda f: f is None }}) - aq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AQ'), 'exclude': lambda f: f is None }}) - ag: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AG'), 'exclude': lambda f: f is None }}) - ar: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AR'), 'exclude': lambda f: f is None }}) - am: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AM'), 'exclude': lambda f: f is None }}) - aw: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AW'), 'exclude': lambda f: f is None }}) - au: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AU'), 'exclude': lambda f: f is None }}) - at: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AT'), 'exclude': lambda f: f is None }}) - az: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AZ'), 'exclude': lambda f: f is None }}) - bs: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BS'), 'exclude': lambda f: f is None }}) - bh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BH'), 'exclude': lambda f: f is None }}) - bd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BD'), 'exclude': lambda f: f is None }}) - bb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BB'), 'exclude': lambda f: f is None }}) - by: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BY'), 'exclude': lambda f: f is None }}) - be: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BE'), 'exclude': lambda f: f is None }}) - bz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BZ'), 'exclude': lambda f: f is None }}) - bj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BJ'), 'exclude': lambda f: f is None }}) - bm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BM'), 'exclude': lambda f: f is None }}) - bt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BT'), 'exclude': lambda f: f is None }}) - bo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BO'), 'exclude': lambda f: f is None }}) - ba: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BA'), 'exclude': lambda f: f is None }}) - bw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BW'), 'exclude': lambda f: f is None }}) - bv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BV'), 'exclude': lambda f: f is None }}) - br: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BR'), 'exclude': lambda f: f is None }}) - io: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IO'), 'exclude': lambda f: f is None }}) - bn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BN'), 'exclude': lambda f: f is None }}) - bg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BG'), 'exclude': lambda f: f is None }}) 
- bf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BF'), 'exclude': lambda f: f is None }}) - bi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BI'), 'exclude': lambda f: f is None }}) - kh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KH'), 'exclude': lambda f: f is None }}) - cm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CM'), 'exclude': lambda f: f is None }}) - ca: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CA'), 'exclude': lambda f: f is None }}) - cv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CV'), 'exclude': lambda f: f is None }}) - ky: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KY'), 'exclude': lambda f: f is None }}) - cf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CF'), 'exclude': lambda f: f is None }}) - td: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TD'), 'exclude': lambda f: f is None }}) - cl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CL'), 'exclude': lambda f: f is None }}) - cn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CN'), 'exclude': lambda f: f is None }}) - cx: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CX'), 'exclude': lambda f: f is None }}) - cc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CC'), 'exclude': lambda f: f is None }}) - co: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CO'), 'exclude': lambda f: f is None }}) - km: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KM'), 'exclude': lambda f: f is None }}) - cg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CG'), 'exclude': lambda f: f is None }}) - cd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CD'), 'exclude': lambda f: f is None }}) - ck: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CK'), 'exclude': lambda f: f is None }}) - cr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CR'), 'exclude': lambda f: f is None }}) - ci: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CI'), 'exclude': lambda f: f is None }}) - hr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HR'), 'exclude': lambda f: f is None }}) - cu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('CU'), 'exclude': lambda f: f is None }}) - cy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CY'), 'exclude': lambda f: f is None }}) - cz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CZ'), 'exclude': lambda f: f is None }}) - dk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DK'), 'exclude': lambda f: f is None }}) - dj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DJ'), 'exclude': lambda f: f is None }}) - dm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DM'), 'exclude': lambda f: f is None }}) - do: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DO'), 'exclude': lambda f: f is None }}) - ec: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EC'), 'exclude': lambda f: f is None }}) - eg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EG'), 'exclude': lambda f: f is None }}) - sv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SV'), 'exclude': lambda f: f is None }}) - gq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GQ'), 'exclude': lambda f: f is None }}) - er: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ER'), 'exclude': lambda f: f is None }}) - ee: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EE'), 'exclude': lambda f: f is None }}) - et: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ET'), 'exclude': lambda f: f is None }}) - fk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FK'), 'exclude': lambda f: f is None }}) - fo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FO'), 'exclude': lambda f: f is None }}) - fj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FJ'), 'exclude': lambda f: f is None }}) - fi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FI'), 'exclude': lambda f: f is None }}) - fr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FR'), 'exclude': lambda f: f is None }}) - gf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GF'), 'exclude': lambda f: f is None }}) - pf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PF'), 'exclude': lambda f: f is None }}) - tf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TF'), 'exclude': lambda f: f is None }}) - ga: Optional[str] = 
dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GA'), 'exclude': lambda f: f is None }}) - gm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GM'), 'exclude': lambda f: f is None }}) - ge: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GE'), 'exclude': lambda f: f is None }}) - de: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DE'), 'exclude': lambda f: f is None }}) - gh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GH'), 'exclude': lambda f: f is None }}) - gi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GI'), 'exclude': lambda f: f is None }}) - gr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GR'), 'exclude': lambda f: f is None }}) - gl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GL'), 'exclude': lambda f: f is None }}) - gd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GD'), 'exclude': lambda f: f is None }}) - gp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GP'), 'exclude': lambda f: f is None }}) - gu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GU'), 'exclude': lambda f: f is None }}) - gt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GT'), 'exclude': lambda f: f is None }}) - gn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GN'), 'exclude': lambda f: f is None }}) - gw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GW'), 'exclude': lambda f: f is None }}) - gy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GY'), 'exclude': lambda f: f is None }}) - ht: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HT'), 'exclude': lambda f: f is None }}) - hm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HM'), 'exclude': lambda f: f is None }}) - va: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VA'), 'exclude': lambda f: f is None }}) - hn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HN'), 'exclude': lambda f: f is None }}) - hk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HK'), 'exclude': lambda f: f is None }}) - hu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HU'), 'exclude': lambda f: f is None }}) - is_: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IS'), 
'exclude': lambda f: f is None }}) - in_: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IN'), 'exclude': lambda f: f is None }}) - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ID'), 'exclude': lambda f: f is None }}) - ir: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IR'), 'exclude': lambda f: f is None }}) - iq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IQ'), 'exclude': lambda f: f is None }}) - ie: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IE'), 'exclude': lambda f: f is None }}) - il: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IL'), 'exclude': lambda f: f is None }}) - it: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IT'), 'exclude': lambda f: f is None }}) - jm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JM'), 'exclude': lambda f: f is None }}) - jp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JP'), 'exclude': lambda f: f is None }}) - jo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JO'), 'exclude': lambda f: f is None }}) - kz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KZ'), 'exclude': lambda f: f is None }}) - ke: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KE'), 'exclude': lambda f: f is None }}) - ki: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KI'), 'exclude': lambda f: f is None }}) - kp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KP'), 'exclude': lambda f: f is None }}) - kr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KR'), 'exclude': lambda f: f is None }}) - kw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KW'), 'exclude': lambda f: f is None }}) - kg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KG'), 'exclude': lambda f: f is None }}) - la: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LA'), 'exclude': lambda f: f is None }}) - lv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LV'), 'exclude': lambda f: f is None }}) - lb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LB'), 'exclude': lambda f: f is None }}) - ls: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LS'), 'exclude': lambda f: f is None }}) - lr: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LR'), 'exclude': lambda f: f is None }}) - ly: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LY'), 'exclude': lambda f: f is None }}) - li: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LI'), 'exclude': lambda f: f is None }}) - lt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LT'), 'exclude': lambda f: f is None }}) - lu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LU'), 'exclude': lambda f: f is None }}) - mo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MO'), 'exclude': lambda f: f is None }}) - mg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MG'), 'exclude': lambda f: f is None }}) - mw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MW'), 'exclude': lambda f: f is None }}) - my: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MY'), 'exclude': lambda f: f is None }}) - mv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MV'), 'exclude': lambda f: f is None }}) - ml: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ML'), 'exclude': lambda f: f is None }}) - mt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MT'), 'exclude': lambda f: f is None }}) - mh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MH'), 'exclude': lambda f: f is None }}) - mq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MQ'), 'exclude': lambda f: f is None }}) - mr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MR'), 'exclude': lambda f: f is None }}) - mu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MU'), 'exclude': lambda f: f is None }}) - yt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('YT'), 'exclude': lambda f: f is None }}) - mx: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MX'), 'exclude': lambda f: f is None }}) - fm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FM'), 'exclude': lambda f: f is None }}) - md: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MD'), 'exclude': lambda f: f is None }}) - mc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MC'), 'exclude': lambda f: f is None }}) - mn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MN'), 'exclude': lambda f: f is None }}) 
- ms: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MS'), 'exclude': lambda f: f is None }}) - ma: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MA'), 'exclude': lambda f: f is None }}) - mz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MZ'), 'exclude': lambda f: f is None }}) - mm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MM'), 'exclude': lambda f: f is None }}) - na: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NA'), 'exclude': lambda f: f is None }}) - nr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NR'), 'exclude': lambda f: f is None }}) - np: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NP'), 'exclude': lambda f: f is None }}) - nl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NL'), 'exclude': lambda f: f is None }}) - nc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NC'), 'exclude': lambda f: f is None }}) - nz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NZ'), 'exclude': lambda f: f is None }}) - ni: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NI'), 'exclude': lambda f: f is None }}) - ne: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NE'), 'exclude': lambda f: f is None }}) - ng: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NG'), 'exclude': lambda f: f is None }}) - nu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NU'), 'exclude': lambda f: f is None }}) - nf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NF'), 'exclude': lambda f: f is None }}) - mk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MK'), 'exclude': lambda f: f is None }}) - mp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MP'), 'exclude': lambda f: f is None }}) - no: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NO'), 'exclude': lambda f: f is None }}) - om: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('OM'), 'exclude': lambda f: f is None }}) - pk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PK'), 'exclude': lambda f: f is None }}) - pw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PW'), 'exclude': lambda f: f is None }}) - ps: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('PS'), 'exclude': lambda f: f is None }}) - pa: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PA'), 'exclude': lambda f: f is None }}) - pg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PG'), 'exclude': lambda f: f is None }}) - py: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PY'), 'exclude': lambda f: f is None }}) - pe: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PE'), 'exclude': lambda f: f is None }}) - ph: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PH'), 'exclude': lambda f: f is None }}) - pn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PN'), 'exclude': lambda f: f is None }}) - pl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PL'), 'exclude': lambda f: f is None }}) - pt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PT'), 'exclude': lambda f: f is None }}) - pr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PR'), 'exclude': lambda f: f is None }}) - qa: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('QA'), 'exclude': lambda f: f is None }}) - re: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RE'), 'exclude': lambda f: f is None }}) - ro: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RO'), 'exclude': lambda f: f is None }}) - ru: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RU'), 'exclude': lambda f: f is None }}) - rw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RW'), 'exclude': lambda f: f is None }}) - sh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SH'), 'exclude': lambda f: f is None }}) - kn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KN'), 'exclude': lambda f: f is None }}) - lc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LC'), 'exclude': lambda f: f is None }}) - pm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PM'), 'exclude': lambda f: f is None }}) - vc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VC'), 'exclude': lambda f: f is None }}) - ws: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('WS'), 'exclude': lambda f: f is None }}) - sm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SM'), 'exclude': lambda f: f is None }}) - st: Optional[str] = 
dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ST'), 'exclude': lambda f: f is None }}) - sa: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SA'), 'exclude': lambda f: f is None }}) - sn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SN'), 'exclude': lambda f: f is None }}) - sc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SC'), 'exclude': lambda f: f is None }}) - sl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SL'), 'exclude': lambda f: f is None }}) - sg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SG'), 'exclude': lambda f: f is None }}) - sk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SK'), 'exclude': lambda f: f is None }}) - si: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SI'), 'exclude': lambda f: f is None }}) - sb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SB'), 'exclude': lambda f: f is None }}) - so: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SO'), 'exclude': lambda f: f is None }}) - za: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ZA'), 'exclude': lambda f: f is None }}) - gs: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GS'), 'exclude': lambda f: f is None }}) - es: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ES'), 'exclude': lambda f: f is None }}) - lk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LK'), 'exclude': lambda f: f is None }}) - sd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SD'), 'exclude': lambda f: f is None }}) - sr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SR'), 'exclude': lambda f: f is None }}) - sj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SJ'), 'exclude': lambda f: f is None }}) - sz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SZ'), 'exclude': lambda f: f is None }}) - se: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SE'), 'exclude': lambda f: f is None }}) - ch: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CH'), 'exclude': lambda f: f is None }}) - sy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SY'), 'exclude': lambda f: f is None }}) - tw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TW'), 
'exclude': lambda f: f is None }}) - tj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TJ'), 'exclude': lambda f: f is None }}) - tz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TZ'), 'exclude': lambda f: f is None }}) - th: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TH'), 'exclude': lambda f: f is None }}) - tl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TL'), 'exclude': lambda f: f is None }}) - tg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TG'), 'exclude': lambda f: f is None }}) - tk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TK'), 'exclude': lambda f: f is None }}) - to: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TO'), 'exclude': lambda f: f is None }}) - tt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TT'), 'exclude': lambda f: f is None }}) - tn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TN'), 'exclude': lambda f: f is None }}) - tr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TR'), 'exclude': lambda f: f is None }}) - tm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TM'), 'exclude': lambda f: f is None }}) - tc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TC'), 'exclude': lambda f: f is None }}) - tv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TV'), 'exclude': lambda f: f is None }}) - ug: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UG'), 'exclude': lambda f: f is None }}) - ua: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UA'), 'exclude': lambda f: f is None }}) - ae: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AE'), 'exclude': lambda f: f is None }}) - gb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GB'), 'exclude': lambda f: f is None }}) - us: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('US'), 'exclude': lambda f: f is None }}) - um: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UM'), 'exclude': lambda f: f is None }}) - uy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UY'), 'exclude': lambda f: f is None }}) - uz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UZ'), 'exclude': lambda f: f is None }}) - vu: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VU'), 'exclude': lambda f: f is None }}) - ve: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VE'), 'exclude': lambda f: f is None }}) - vn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VN'), 'exclude': lambda f: f is None }}) - vg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VG'), 'exclude': lambda f: f is None }}) - vi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VI'), 'exclude': lambda f: f is None }}) - wf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('WF'), 'exclude': lambda f: f is None }}) - eh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EH'), 'exclude': lambda f: f is None }}) - ye: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('YE'), 'exclude': lambda f: f is None }}) - zm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ZM'), 'exclude': lambda f: f is None }}) - zw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ZW'), 'exclude': lambda f: f is None }}) - ax: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AX'), 'exclude': lambda f: f is None }}) - bq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BQ'), 'exclude': lambda f: f is None }}) - cw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CW'), 'exclude': lambda f: f is None }}) - gg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GG'), 'exclude': lambda f: f is None }}) - im: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IM'), 'exclude': lambda f: f is None }}) - je: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JE'), 'exclude': lambda f: f is None }}) - me: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ME'), 'exclude': lambda f: f is None }}) - bl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BL'), 'exclude': lambda f: f is None }}) - mf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MF'), 'exclude': lambda f: f is None }}) - rs: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RS'), 'exclude': lambda f: f is None }}) - sx: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SX'), 'exclude': lambda f: f is None }}) - ss: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SS'), 'exclude': lambda f: f is None }}) 
- xk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('XK'), 'exclude': lambda f: f is None }}) + + af: NotRequired[str] + al: NotRequired[str] + dz: NotRequired[str] + as_: NotRequired[str] + ad: NotRequired[str] + ao: NotRequired[str] + ai: NotRequired[str] + aq: NotRequired[str] + ag: NotRequired[str] + ar: NotRequired[str] + am: NotRequired[str] + aw: NotRequired[str] + au: NotRequired[str] + at: NotRequired[str] + az: NotRequired[str] + bs: NotRequired[str] + bh: NotRequired[str] + bd: NotRequired[str] + bb: NotRequired[str] + by: NotRequired[str] + be: NotRequired[str] + bz: NotRequired[str] + bj: NotRequired[str] + bm: NotRequired[str] + bt: NotRequired[str] + bo: NotRequired[str] + ba: NotRequired[str] + bw: NotRequired[str] + bv: NotRequired[str] + br: NotRequired[str] + io: NotRequired[str] + bn: NotRequired[str] + bg: NotRequired[str] + bf: NotRequired[str] + bi: NotRequired[str] + kh: NotRequired[str] + cm: NotRequired[str] + ca: NotRequired[str] + cv: NotRequired[str] + ky: NotRequired[str] + cf: NotRequired[str] + td: NotRequired[str] + cl: NotRequired[str] + cn: NotRequired[str] + cx: NotRequired[str] + cc: NotRequired[str] + co: NotRequired[str] + km: NotRequired[str] + cg: NotRequired[str] + cd: NotRequired[str] + ck: NotRequired[str] + cr: NotRequired[str] + ci: NotRequired[str] + hr: NotRequired[str] + cu: NotRequired[str] + cy: NotRequired[str] + cz: NotRequired[str] + dk: NotRequired[str] + dj: NotRequired[str] + dm: NotRequired[str] + do: NotRequired[str] + ec: NotRequired[str] + eg: NotRequired[str] + sv: NotRequired[str] + gq: NotRequired[str] + er: NotRequired[str] + ee: NotRequired[str] + et: NotRequired[str] + fk: NotRequired[str] + fo: NotRequired[str] + fj: NotRequired[str] + fi: NotRequired[str] + fr: NotRequired[str] + gf: NotRequired[str] + pf: NotRequired[str] + tf: NotRequired[str] + ga: NotRequired[str] + gm: NotRequired[str] + ge: NotRequired[str] + de: NotRequired[str] + gh: NotRequired[str] + gi: NotRequired[str] + gr: NotRequired[str] + gl: NotRequired[str] + gd: NotRequired[str] + gp: NotRequired[str] + gu: NotRequired[str] + gt: NotRequired[str] + gn: NotRequired[str] + gw: NotRequired[str] + gy: NotRequired[str] + ht: NotRequired[str] + hm: NotRequired[str] + va: NotRequired[str] + hn: NotRequired[str] + hk: NotRequired[str] + hu: NotRequired[str] + is_: NotRequired[str] + in_: NotRequired[str] + id: NotRequired[str] + ir: NotRequired[str] + iq: NotRequired[str] + ie: NotRequired[str] + il: NotRequired[str] + it: NotRequired[str] + jm: NotRequired[str] + jp: NotRequired[str] + jo: NotRequired[str] + kz: NotRequired[str] + ke: NotRequired[str] + ki: NotRequired[str] + kp: NotRequired[str] + kr: NotRequired[str] + kw: NotRequired[str] + kg: NotRequired[str] + la: NotRequired[str] + lv: NotRequired[str] + lb: NotRequired[str] + ls: NotRequired[str] + lr: NotRequired[str] + ly: NotRequired[str] + li: NotRequired[str] + lt: NotRequired[str] + lu: NotRequired[str] + mo: NotRequired[str] + mg: NotRequired[str] + mw: NotRequired[str] + my: NotRequired[str] + mv: NotRequired[str] + ml: NotRequired[str] + mt: NotRequired[str] + mh: NotRequired[str] + mq: NotRequired[str] + mr: NotRequired[str] + mu: NotRequired[str] + yt: NotRequired[str] + mx: NotRequired[str] + fm: NotRequired[str] + md: NotRequired[str] + mc: NotRequired[str] + mn: NotRequired[str] + ms: NotRequired[str] + ma: NotRequired[str] + mz: NotRequired[str] + mm: NotRequired[str] + na: NotRequired[str] + nr: NotRequired[str] + 
np: NotRequired[str] + nl: NotRequired[str] + nc: NotRequired[str] + nz: NotRequired[str] + ni: NotRequired[str] + ne: NotRequired[str] + ng: NotRequired[str] + nu: NotRequired[str] + nf: NotRequired[str] + mk: NotRequired[str] + mp: NotRequired[str] + no: NotRequired[str] + om: NotRequired[str] + pk: NotRequired[str] + pw: NotRequired[str] + ps: NotRequired[str] + pa: NotRequired[str] + pg: NotRequired[str] + py: NotRequired[str] + pe: NotRequired[str] + ph: NotRequired[str] + pn: NotRequired[str] + pl: NotRequired[str] + pt: NotRequired[str] + pr: NotRequired[str] + qa: NotRequired[str] + re: NotRequired[str] + ro: NotRequired[str] + ru: NotRequired[str] + rw: NotRequired[str] + sh: NotRequired[str] + kn: NotRequired[str] + lc: NotRequired[str] + pm: NotRequired[str] + vc: NotRequired[str] + ws: NotRequired[str] + sm: NotRequired[str] + st: NotRequired[str] + sa: NotRequired[str] + sn: NotRequired[str] + sc: NotRequired[str] + sl: NotRequired[str] + sg: NotRequired[str] + sk: NotRequired[str] + si: NotRequired[str] + sb: NotRequired[str] + so: NotRequired[str] + za: NotRequired[str] + gs: NotRequired[str] + es: NotRequired[str] + lk: NotRequired[str] + sd: NotRequired[str] + sr: NotRequired[str] + sj: NotRequired[str] + sz: NotRequired[str] + se: NotRequired[str] + ch: NotRequired[str] + sy: NotRequired[str] + tw: NotRequired[str] + tj: NotRequired[str] + tz: NotRequired[str] + th: NotRequired[str] + tl: NotRequired[str] + tg: NotRequired[str] + tk: NotRequired[str] + to: NotRequired[str] + tt: NotRequired[str] + tn: NotRequired[str] + tr: NotRequired[str] + tm: NotRequired[str] + tc: NotRequired[str] + tv: NotRequired[str] + ug: NotRequired[str] + ua: NotRequired[str] + ae: NotRequired[str] + gb: NotRequired[str] + us: NotRequired[str] + um: NotRequired[str] + uy: NotRequired[str] + uz: NotRequired[str] + vu: NotRequired[str] + ve: NotRequired[str] + vn: NotRequired[str] + vg: NotRequired[str] + vi: NotRequired[str] + wf: NotRequired[str] + eh: NotRequired[str] + ye: NotRequired[str] + zm: NotRequired[str] + zw: NotRequired[str] + ax: NotRequired[str] + bq: NotRequired[str] + cw: NotRequired[str] + gg: NotRequired[str] + im: NotRequired[str] + je: NotRequired[str] + me: NotRequired[str] + bl: NotRequired[str] + mf: NotRequired[str] + rs: NotRequired[str] + sx: NotRequired[str] + ss: NotRequired[str] + xk: NotRequired[str] +class LinkGeoTargeting(BaseModel): + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" + + af: Annotated[Optional[str], pydantic.Field(alias="AF")] = None + al: Annotated[Optional[str], pydantic.Field(alias="AL")] = None + dz: Annotated[Optional[str], pydantic.Field(alias="DZ")] = None + as_: Annotated[Optional[str], pydantic.Field(alias="AS")] = None + ad: Annotated[Optional[str], pydantic.Field(alias="AD")] = None + ao: Annotated[Optional[str], pydantic.Field(alias="AO")] = None + ai: Annotated[Optional[str], pydantic.Field(alias="AI")] = None + aq: Annotated[Optional[str], pydantic.Field(alias="AQ")] = None + ag: Annotated[Optional[str], pydantic.Field(alias="AG")] = None + ar: Annotated[Optional[str], pydantic.Field(alias="AR")] = None + am: Annotated[Optional[str], pydantic.Field(alias="AM")] = None + aw: Annotated[Optional[str], pydantic.Field(alias="AW")] = None + au: Annotated[Optional[str], pydantic.Field(alias="AU")] = None + at: Annotated[Optional[str], pydantic.Field(alias="AT")] = None + az: Annotated[Optional[str], pydantic.Field(alias="AZ")] = None + bs: Annotated[Optional[str], 
pydantic.Field(alias="BS")] = None + bh: Annotated[Optional[str], pydantic.Field(alias="BH")] = None + bd: Annotated[Optional[str], pydantic.Field(alias="BD")] = None + bb: Annotated[Optional[str], pydantic.Field(alias="BB")] = None + by: Annotated[Optional[str], pydantic.Field(alias="BY")] = None + be: Annotated[Optional[str], pydantic.Field(alias="BE")] = None + bz: Annotated[Optional[str], pydantic.Field(alias="BZ")] = None + bj: Annotated[Optional[str], pydantic.Field(alias="BJ")] = None + bm: Annotated[Optional[str], pydantic.Field(alias="BM")] = None + bt: Annotated[Optional[str], pydantic.Field(alias="BT")] = None + bo: Annotated[Optional[str], pydantic.Field(alias="BO")] = None + ba: Annotated[Optional[str], pydantic.Field(alias="BA")] = None + bw: Annotated[Optional[str], pydantic.Field(alias="BW")] = None + bv: Annotated[Optional[str], pydantic.Field(alias="BV")] = None + br: Annotated[Optional[str], pydantic.Field(alias="BR")] = None + io: Annotated[Optional[str], pydantic.Field(alias="IO")] = None + bn: Annotated[Optional[str], pydantic.Field(alias="BN")] = None + bg: Annotated[Optional[str], pydantic.Field(alias="BG")] = None + bf: Annotated[Optional[str], pydantic.Field(alias="BF")] = None + bi: Annotated[Optional[str], pydantic.Field(alias="BI")] = None + kh: Annotated[Optional[str], pydantic.Field(alias="KH")] = None + cm: Annotated[Optional[str], pydantic.Field(alias="CM")] = None + ca: Annotated[Optional[str], pydantic.Field(alias="CA")] = None + cv: Annotated[Optional[str], pydantic.Field(alias="CV")] = None + ky: Annotated[Optional[str], pydantic.Field(alias="KY")] = None + cf: Annotated[Optional[str], pydantic.Field(alias="CF")] = None + td: Annotated[Optional[str], pydantic.Field(alias="TD")] = None + cl: Annotated[Optional[str], pydantic.Field(alias="CL")] = None + cn: Annotated[Optional[str], pydantic.Field(alias="CN")] = None + cx: Annotated[Optional[str], pydantic.Field(alias="CX")] = None + cc: Annotated[Optional[str], pydantic.Field(alias="CC")] = None + co: Annotated[Optional[str], pydantic.Field(alias="CO")] = None + km: Annotated[Optional[str], pydantic.Field(alias="KM")] = None + cg: Annotated[Optional[str], pydantic.Field(alias="CG")] = None + cd: Annotated[Optional[str], pydantic.Field(alias="CD")] = None + ck: Annotated[Optional[str], pydantic.Field(alias="CK")] = None + cr: Annotated[Optional[str], pydantic.Field(alias="CR")] = None + ci: Annotated[Optional[str], pydantic.Field(alias="CI")] = None + hr: Annotated[Optional[str], pydantic.Field(alias="HR")] = None + cu: Annotated[Optional[str], pydantic.Field(alias="CU")] = None + cy: Annotated[Optional[str], pydantic.Field(alias="CY")] = None + cz: Annotated[Optional[str], pydantic.Field(alias="CZ")] = None + dk: Annotated[Optional[str], pydantic.Field(alias="DK")] = None + dj: Annotated[Optional[str], pydantic.Field(alias="DJ")] = None + dm: Annotated[Optional[str], pydantic.Field(alias="DM")] = None + do: Annotated[Optional[str], pydantic.Field(alias="DO")] = None + ec: Annotated[Optional[str], pydantic.Field(alias="EC")] = None + eg: Annotated[Optional[str], pydantic.Field(alias="EG")] = None + sv: Annotated[Optional[str], pydantic.Field(alias="SV")] = None + gq: Annotated[Optional[str], pydantic.Field(alias="GQ")] = None + er: Annotated[Optional[str], pydantic.Field(alias="ER")] = None + ee: Annotated[Optional[str], pydantic.Field(alias="EE")] = None + et: Annotated[Optional[str], pydantic.Field(alias="ET")] = None + fk: Annotated[Optional[str], pydantic.Field(alias="FK")] = None + fo: 
Annotated[Optional[str], pydantic.Field(alias="FO")] = None + fj: Annotated[Optional[str], pydantic.Field(alias="FJ")] = None + fi: Annotated[Optional[str], pydantic.Field(alias="FI")] = None + fr: Annotated[Optional[str], pydantic.Field(alias="FR")] = None + gf: Annotated[Optional[str], pydantic.Field(alias="GF")] = None + pf: Annotated[Optional[str], pydantic.Field(alias="PF")] = None + tf: Annotated[Optional[str], pydantic.Field(alias="TF")] = None + ga: Annotated[Optional[str], pydantic.Field(alias="GA")] = None + gm: Annotated[Optional[str], pydantic.Field(alias="GM")] = None + ge: Annotated[Optional[str], pydantic.Field(alias="GE")] = None + de: Annotated[Optional[str], pydantic.Field(alias="DE")] = None + gh: Annotated[Optional[str], pydantic.Field(alias="GH")] = None + gi: Annotated[Optional[str], pydantic.Field(alias="GI")] = None + gr: Annotated[Optional[str], pydantic.Field(alias="GR")] = None + gl: Annotated[Optional[str], pydantic.Field(alias="GL")] = None + gd: Annotated[Optional[str], pydantic.Field(alias="GD")] = None + gp: Annotated[Optional[str], pydantic.Field(alias="GP")] = None + gu: Annotated[Optional[str], pydantic.Field(alias="GU")] = None + gt: Annotated[Optional[str], pydantic.Field(alias="GT")] = None + gn: Annotated[Optional[str], pydantic.Field(alias="GN")] = None + gw: Annotated[Optional[str], pydantic.Field(alias="GW")] = None + gy: Annotated[Optional[str], pydantic.Field(alias="GY")] = None + ht: Annotated[Optional[str], pydantic.Field(alias="HT")] = None + hm: Annotated[Optional[str], pydantic.Field(alias="HM")] = None + va: Annotated[Optional[str], pydantic.Field(alias="VA")] = None + hn: Annotated[Optional[str], pydantic.Field(alias="HN")] = None + hk: Annotated[Optional[str], pydantic.Field(alias="HK")] = None + hu: Annotated[Optional[str], pydantic.Field(alias="HU")] = None + is_: Annotated[Optional[str], pydantic.Field(alias="IS")] = None + in_: Annotated[Optional[str], pydantic.Field(alias="IN")] = None + id: Annotated[Optional[str], pydantic.Field(alias="ID")] = None + ir: Annotated[Optional[str], pydantic.Field(alias="IR")] = None + iq: Annotated[Optional[str], pydantic.Field(alias="IQ")] = None + ie: Annotated[Optional[str], pydantic.Field(alias="IE")] = None + il: Annotated[Optional[str], pydantic.Field(alias="IL")] = None + it: Annotated[Optional[str], pydantic.Field(alias="IT")] = None + jm: Annotated[Optional[str], pydantic.Field(alias="JM")] = None + jp: Annotated[Optional[str], pydantic.Field(alias="JP")] = None + jo: Annotated[Optional[str], pydantic.Field(alias="JO")] = None + kz: Annotated[Optional[str], pydantic.Field(alias="KZ")] = None + ke: Annotated[Optional[str], pydantic.Field(alias="KE")] = None + ki: Annotated[Optional[str], pydantic.Field(alias="KI")] = None + kp: Annotated[Optional[str], pydantic.Field(alias="KP")] = None + kr: Annotated[Optional[str], pydantic.Field(alias="KR")] = None + kw: Annotated[Optional[str], pydantic.Field(alias="KW")] = None + kg: Annotated[Optional[str], pydantic.Field(alias="KG")] = None + la: Annotated[Optional[str], pydantic.Field(alias="LA")] = None + lv: Annotated[Optional[str], pydantic.Field(alias="LV")] = None + lb: Annotated[Optional[str], pydantic.Field(alias="LB")] = None + ls: Annotated[Optional[str], pydantic.Field(alias="LS")] = None + lr: Annotated[Optional[str], pydantic.Field(alias="LR")] = None + ly: Annotated[Optional[str], pydantic.Field(alias="LY")] = None + li: Annotated[Optional[str], pydantic.Field(alias="LI")] = None + lt: Annotated[Optional[str], pydantic.Field(alias="LT")] = 
None + lu: Annotated[Optional[str], pydantic.Field(alias="LU")] = None + mo: Annotated[Optional[str], pydantic.Field(alias="MO")] = None + mg: Annotated[Optional[str], pydantic.Field(alias="MG")] = None + mw: Annotated[Optional[str], pydantic.Field(alias="MW")] = None + my: Annotated[Optional[str], pydantic.Field(alias="MY")] = None + mv: Annotated[Optional[str], pydantic.Field(alias="MV")] = None + ml: Annotated[Optional[str], pydantic.Field(alias="ML")] = None + mt: Annotated[Optional[str], pydantic.Field(alias="MT")] = None + mh: Annotated[Optional[str], pydantic.Field(alias="MH")] = None + mq: Annotated[Optional[str], pydantic.Field(alias="MQ")] = None + mr: Annotated[Optional[str], pydantic.Field(alias="MR")] = None + mu: Annotated[Optional[str], pydantic.Field(alias="MU")] = None + yt: Annotated[Optional[str], pydantic.Field(alias="YT")] = None + mx: Annotated[Optional[str], pydantic.Field(alias="MX")] = None + fm: Annotated[Optional[str], pydantic.Field(alias="FM")] = None + md: Annotated[Optional[str], pydantic.Field(alias="MD")] = None + mc: Annotated[Optional[str], pydantic.Field(alias="MC")] = None + mn: Annotated[Optional[str], pydantic.Field(alias="MN")] = None + ms: Annotated[Optional[str], pydantic.Field(alias="MS")] = None + ma: Annotated[Optional[str], pydantic.Field(alias="MA")] = None + mz: Annotated[Optional[str], pydantic.Field(alias="MZ")] = None + mm: Annotated[Optional[str], pydantic.Field(alias="MM")] = None + na: Annotated[Optional[str], pydantic.Field(alias="NA")] = None + nr: Annotated[Optional[str], pydantic.Field(alias="NR")] = None + np: Annotated[Optional[str], pydantic.Field(alias="NP")] = None + nl: Annotated[Optional[str], pydantic.Field(alias="NL")] = None + nc: Annotated[Optional[str], pydantic.Field(alias="NC")] = None + nz: Annotated[Optional[str], pydantic.Field(alias="NZ")] = None + ni: Annotated[Optional[str], pydantic.Field(alias="NI")] = None + ne: Annotated[Optional[str], pydantic.Field(alias="NE")] = None + ng: Annotated[Optional[str], pydantic.Field(alias="NG")] = None + nu: Annotated[Optional[str], pydantic.Field(alias="NU")] = None + nf: Annotated[Optional[str], pydantic.Field(alias="NF")] = None + mk: Annotated[Optional[str], pydantic.Field(alias="MK")] = None + mp: Annotated[Optional[str], pydantic.Field(alias="MP")] = None + no: Annotated[Optional[str], pydantic.Field(alias="NO")] = None + om: Annotated[Optional[str], pydantic.Field(alias="OM")] = None + pk: Annotated[Optional[str], pydantic.Field(alias="PK")] = None + pw: Annotated[Optional[str], pydantic.Field(alias="PW")] = None + ps: Annotated[Optional[str], pydantic.Field(alias="PS")] = None + pa: Annotated[Optional[str], pydantic.Field(alias="PA")] = None + pg: Annotated[Optional[str], pydantic.Field(alias="PG")] = None + py: Annotated[Optional[str], pydantic.Field(alias="PY")] = None + pe: Annotated[Optional[str], pydantic.Field(alias="PE")] = None + ph: Annotated[Optional[str], pydantic.Field(alias="PH")] = None + pn: Annotated[Optional[str], pydantic.Field(alias="PN")] = None + pl: Annotated[Optional[str], pydantic.Field(alias="PL")] = None + pt: Annotated[Optional[str], pydantic.Field(alias="PT")] = None + pr: Annotated[Optional[str], pydantic.Field(alias="PR")] = None + qa: Annotated[Optional[str], pydantic.Field(alias="QA")] = None + re: Annotated[Optional[str], pydantic.Field(alias="RE")] = None + ro: Annotated[Optional[str], pydantic.Field(alias="RO")] = None + ru: Annotated[Optional[str], pydantic.Field(alias="RU")] = None + rw: Annotated[Optional[str], 
pydantic.Field(alias="RW")] = None + sh: Annotated[Optional[str], pydantic.Field(alias="SH")] = None + kn: Annotated[Optional[str], pydantic.Field(alias="KN")] = None + lc: Annotated[Optional[str], pydantic.Field(alias="LC")] = None + pm: Annotated[Optional[str], pydantic.Field(alias="PM")] = None + vc: Annotated[Optional[str], pydantic.Field(alias="VC")] = None + ws: Annotated[Optional[str], pydantic.Field(alias="WS")] = None + sm: Annotated[Optional[str], pydantic.Field(alias="SM")] = None + st: Annotated[Optional[str], pydantic.Field(alias="ST")] = None + sa: Annotated[Optional[str], pydantic.Field(alias="SA")] = None + sn: Annotated[Optional[str], pydantic.Field(alias="SN")] = None + sc: Annotated[Optional[str], pydantic.Field(alias="SC")] = None + sl: Annotated[Optional[str], pydantic.Field(alias="SL")] = None + sg: Annotated[Optional[str], pydantic.Field(alias="SG")] = None + sk: Annotated[Optional[str], pydantic.Field(alias="SK")] = None + si: Annotated[Optional[str], pydantic.Field(alias="SI")] = None + sb: Annotated[Optional[str], pydantic.Field(alias="SB")] = None + so: Annotated[Optional[str], pydantic.Field(alias="SO")] = None + za: Annotated[Optional[str], pydantic.Field(alias="ZA")] = None + gs: Annotated[Optional[str], pydantic.Field(alias="GS")] = None + es: Annotated[Optional[str], pydantic.Field(alias="ES")] = None + lk: Annotated[Optional[str], pydantic.Field(alias="LK")] = None + sd: Annotated[Optional[str], pydantic.Field(alias="SD")] = None + sr: Annotated[Optional[str], pydantic.Field(alias="SR")] = None + sj: Annotated[Optional[str], pydantic.Field(alias="SJ")] = None + sz: Annotated[Optional[str], pydantic.Field(alias="SZ")] = None + se: Annotated[Optional[str], pydantic.Field(alias="SE")] = None + ch: Annotated[Optional[str], pydantic.Field(alias="CH")] = None + sy: Annotated[Optional[str], pydantic.Field(alias="SY")] = None + tw: Annotated[Optional[str], pydantic.Field(alias="TW")] = None + tj: Annotated[Optional[str], pydantic.Field(alias="TJ")] = None + tz: Annotated[Optional[str], pydantic.Field(alias="TZ")] = None + th: Annotated[Optional[str], pydantic.Field(alias="TH")] = None + tl: Annotated[Optional[str], pydantic.Field(alias="TL")] = None + tg: Annotated[Optional[str], pydantic.Field(alias="TG")] = None + tk: Annotated[Optional[str], pydantic.Field(alias="TK")] = None + to: Annotated[Optional[str], pydantic.Field(alias="TO")] = None + tt: Annotated[Optional[str], pydantic.Field(alias="TT")] = None + tn: Annotated[Optional[str], pydantic.Field(alias="TN")] = None + tr: Annotated[Optional[str], pydantic.Field(alias="TR")] = None + tm: Annotated[Optional[str], pydantic.Field(alias="TM")] = None + tc: Annotated[Optional[str], pydantic.Field(alias="TC")] = None + tv: Annotated[Optional[str], pydantic.Field(alias="TV")] = None + ug: Annotated[Optional[str], pydantic.Field(alias="UG")] = None + ua: Annotated[Optional[str], pydantic.Field(alias="UA")] = None + ae: Annotated[Optional[str], pydantic.Field(alias="AE")] = None + gb: Annotated[Optional[str], pydantic.Field(alias="GB")] = None + us: Annotated[Optional[str], pydantic.Field(alias="US")] = None + um: Annotated[Optional[str], pydantic.Field(alias="UM")] = None + uy: Annotated[Optional[str], pydantic.Field(alias="UY")] = None + uz: Annotated[Optional[str], pydantic.Field(alias="UZ")] = None + vu: Annotated[Optional[str], pydantic.Field(alias="VU")] = None + ve: Annotated[Optional[str], pydantic.Field(alias="VE")] = None + vn: Annotated[Optional[str], pydantic.Field(alias="VN")] = None + vg: 
Annotated[Optional[str], pydantic.Field(alias="VG")] = None + vi: Annotated[Optional[str], pydantic.Field(alias="VI")] = None + wf: Annotated[Optional[str], pydantic.Field(alias="WF")] = None + eh: Annotated[Optional[str], pydantic.Field(alias="EH")] = None + ye: Annotated[Optional[str], pydantic.Field(alias="YE")] = None + zm: Annotated[Optional[str], pydantic.Field(alias="ZM")] = None + zw: Annotated[Optional[str], pydantic.Field(alias="ZW")] = None + ax: Annotated[Optional[str], pydantic.Field(alias="AX")] = None + bq: Annotated[Optional[str], pydantic.Field(alias="BQ")] = None + cw: Annotated[Optional[str], pydantic.Field(alias="CW")] = None + gg: Annotated[Optional[str], pydantic.Field(alias="GG")] = None + im: Annotated[Optional[str], pydantic.Field(alias="IM")] = None + je: Annotated[Optional[str], pydantic.Field(alias="JE")] = None + me: Annotated[Optional[str], pydantic.Field(alias="ME")] = None + bl: Annotated[Optional[str], pydantic.Field(alias="BL")] = None + mf: Annotated[Optional[str], pydantic.Field(alias="MF")] = None + rs: Annotated[Optional[str], pydantic.Field(alias="RS")] = None + sx: Annotated[Optional[str], pydantic.Field(alias="SX")] = None + ss: Annotated[Optional[str], pydantic.Field(alias="SS")] = None + xk: Annotated[Optional[str], pydantic.Field(alias="XK")] = None + diff --git a/src/dub/models/components/linkschema.py b/src/dub/models/components/linkschema.py index e6584b0..58efe46 100644 --- a/src/dub/models/components/linkschema.py +++ b/src/dub/models/components/linkschema.py @@ -1,357 +1,707 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from .tagschema import TagSchema -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import List, Optional +from .tagschema import TagSchema, TagSchemaTypedDict +from dub.types import BaseModel, Nullable +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Geo: +class GeoTypedDict(TypedDict): r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. 
Learn more: https://d.to/geo""" - af: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AF'), 'exclude': lambda f: f is None }}) - al: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AL'), 'exclude': lambda f: f is None }}) - dz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DZ'), 'exclude': lambda f: f is None }}) - as_: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AS'), 'exclude': lambda f: f is None }}) - ad: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AD'), 'exclude': lambda f: f is None }}) - ao: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AO'), 'exclude': lambda f: f is None }}) - ai: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AI'), 'exclude': lambda f: f is None }}) - aq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AQ'), 'exclude': lambda f: f is None }}) - ag: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AG'), 'exclude': lambda f: f is None }}) - ar: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AR'), 'exclude': lambda f: f is None }}) - am: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AM'), 'exclude': lambda f: f is None }}) - aw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AW'), 'exclude': lambda f: f is None }}) - au: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AU'), 'exclude': lambda f: f is None }}) - at: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AT'), 'exclude': lambda f: f is None }}) - az: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AZ'), 'exclude': lambda f: f is None }}) - bs: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BS'), 'exclude': lambda f: f is None }}) - bh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BH'), 'exclude': lambda f: f is None }}) - bd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BD'), 'exclude': lambda f: f is None }}) - bb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BB'), 'exclude': lambda f: f is None }}) - by: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BY'), 'exclude': lambda f: f is None }}) - be: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BE'), 'exclude': lambda f: f is None }}) - bz: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BZ'), 'exclude': lambda f: f is None }}) - bj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BJ'), 'exclude': lambda f: f is None }}) - bm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BM'), 'exclude': lambda f: f is None }}) - bt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BT'), 'exclude': lambda f: f is None }}) - bo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BO'), 'exclude': lambda f: f is None }}) - ba: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BA'), 'exclude': lambda f: f is None }}) - bw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BW'), 'exclude': lambda f: f is None }}) - bv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BV'), 'exclude': lambda f: f is None }}) - br: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BR'), 'exclude': lambda f: f is None }}) - io: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IO'), 'exclude': lambda f: f is None }}) - bn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BN'), 'exclude': lambda f: f is None }}) - bg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BG'), 'exclude': lambda f: f is None }}) - bf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BF'), 'exclude': lambda f: f is None }}) - bi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BI'), 'exclude': lambda f: f is None }}) - kh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KH'), 'exclude': lambda f: f is None }}) - cm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CM'), 'exclude': lambda f: f is None }}) - ca: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CA'), 'exclude': lambda f: f is None }}) - cv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CV'), 'exclude': lambda f: f is None }}) - ky: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KY'), 'exclude': lambda f: f is None }}) - cf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CF'), 'exclude': lambda f: f is None }}) - td: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TD'), 'exclude': lambda f: f is None }}) - cl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CL'), 'exclude': lambda f: f is None }}) 
- cn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CN'), 'exclude': lambda f: f is None }}) - cx: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CX'), 'exclude': lambda f: f is None }}) - cc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CC'), 'exclude': lambda f: f is None }}) - co: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CO'), 'exclude': lambda f: f is None }}) - km: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KM'), 'exclude': lambda f: f is None }}) - cg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CG'), 'exclude': lambda f: f is None }}) - cd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CD'), 'exclude': lambda f: f is None }}) - ck: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CK'), 'exclude': lambda f: f is None }}) - cr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CR'), 'exclude': lambda f: f is None }}) - ci: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CI'), 'exclude': lambda f: f is None }}) - hr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HR'), 'exclude': lambda f: f is None }}) - cu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CU'), 'exclude': lambda f: f is None }}) - cy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CY'), 'exclude': lambda f: f is None }}) - cz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CZ'), 'exclude': lambda f: f is None }}) - dk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DK'), 'exclude': lambda f: f is None }}) - dj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DJ'), 'exclude': lambda f: f is None }}) - dm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DM'), 'exclude': lambda f: f is None }}) - do: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DO'), 'exclude': lambda f: f is None }}) - ec: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EC'), 'exclude': lambda f: f is None }}) - eg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EG'), 'exclude': lambda f: f is None }}) - sv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SV'), 'exclude': lambda f: f is None }}) - gq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('GQ'), 'exclude': lambda f: f is None }}) - er: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ER'), 'exclude': lambda f: f is None }}) - ee: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EE'), 'exclude': lambda f: f is None }}) - et: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ET'), 'exclude': lambda f: f is None }}) - fk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FK'), 'exclude': lambda f: f is None }}) - fo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FO'), 'exclude': lambda f: f is None }}) - fj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FJ'), 'exclude': lambda f: f is None }}) - fi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FI'), 'exclude': lambda f: f is None }}) - fr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FR'), 'exclude': lambda f: f is None }}) - gf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GF'), 'exclude': lambda f: f is None }}) - pf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PF'), 'exclude': lambda f: f is None }}) - tf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TF'), 'exclude': lambda f: f is None }}) - ga: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GA'), 'exclude': lambda f: f is None }}) - gm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GM'), 'exclude': lambda f: f is None }}) - ge: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GE'), 'exclude': lambda f: f is None }}) - de: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('DE'), 'exclude': lambda f: f is None }}) - gh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GH'), 'exclude': lambda f: f is None }}) - gi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GI'), 'exclude': lambda f: f is None }}) - gr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GR'), 'exclude': lambda f: f is None }}) - gl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GL'), 'exclude': lambda f: f is None }}) - gd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GD'), 'exclude': lambda f: f is None }}) - gp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GP'), 'exclude': lambda f: f is None }}) - gu: Optional[str] = 
dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GU'), 'exclude': lambda f: f is None }}) - gt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GT'), 'exclude': lambda f: f is None }}) - gn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GN'), 'exclude': lambda f: f is None }}) - gw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GW'), 'exclude': lambda f: f is None }}) - gy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GY'), 'exclude': lambda f: f is None }}) - ht: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HT'), 'exclude': lambda f: f is None }}) - hm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HM'), 'exclude': lambda f: f is None }}) - va: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VA'), 'exclude': lambda f: f is None }}) - hn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HN'), 'exclude': lambda f: f is None }}) - hk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HK'), 'exclude': lambda f: f is None }}) - hu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('HU'), 'exclude': lambda f: f is None }}) - is_: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IS'), 'exclude': lambda f: f is None }}) - in_: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IN'), 'exclude': lambda f: f is None }}) - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ID'), 'exclude': lambda f: f is None }}) - ir: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IR'), 'exclude': lambda f: f is None }}) - iq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IQ'), 'exclude': lambda f: f is None }}) - ie: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IE'), 'exclude': lambda f: f is None }}) - il: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IL'), 'exclude': lambda f: f is None }}) - it: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IT'), 'exclude': lambda f: f is None }}) - jm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JM'), 'exclude': lambda f: f is None }}) - jp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JP'), 'exclude': lambda f: f is None }}) - jo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JO'), 
'exclude': lambda f: f is None }}) - kz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KZ'), 'exclude': lambda f: f is None }}) - ke: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KE'), 'exclude': lambda f: f is None }}) - ki: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KI'), 'exclude': lambda f: f is None }}) - kp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KP'), 'exclude': lambda f: f is None }}) - kr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KR'), 'exclude': lambda f: f is None }}) - kw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KW'), 'exclude': lambda f: f is None }}) - kg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KG'), 'exclude': lambda f: f is None }}) - la: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LA'), 'exclude': lambda f: f is None }}) - lv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LV'), 'exclude': lambda f: f is None }}) - lb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LB'), 'exclude': lambda f: f is None }}) - ls: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LS'), 'exclude': lambda f: f is None }}) - lr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LR'), 'exclude': lambda f: f is None }}) - ly: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LY'), 'exclude': lambda f: f is None }}) - li: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LI'), 'exclude': lambda f: f is None }}) - lt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LT'), 'exclude': lambda f: f is None }}) - lu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LU'), 'exclude': lambda f: f is None }}) - mo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MO'), 'exclude': lambda f: f is None }}) - mg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MG'), 'exclude': lambda f: f is None }}) - mw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MW'), 'exclude': lambda f: f is None }}) - my: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MY'), 'exclude': lambda f: f is None }}) - mv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MV'), 'exclude': lambda f: f is None }}) - ml: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ML'), 'exclude': lambda f: f is None }}) - mt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MT'), 'exclude': lambda f: f is None }}) - mh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MH'), 'exclude': lambda f: f is None }}) - mq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MQ'), 'exclude': lambda f: f is None }}) - mr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MR'), 'exclude': lambda f: f is None }}) - mu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MU'), 'exclude': lambda f: f is None }}) - yt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('YT'), 'exclude': lambda f: f is None }}) - mx: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MX'), 'exclude': lambda f: f is None }}) - fm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('FM'), 'exclude': lambda f: f is None }}) - md: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MD'), 'exclude': lambda f: f is None }}) - mc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MC'), 'exclude': lambda f: f is None }}) - mn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MN'), 'exclude': lambda f: f is None }}) - ms: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MS'), 'exclude': lambda f: f is None }}) - ma: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MA'), 'exclude': lambda f: f is None }}) - mz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MZ'), 'exclude': lambda f: f is None }}) - mm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MM'), 'exclude': lambda f: f is None }}) - na: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NA'), 'exclude': lambda f: f is None }}) - nr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NR'), 'exclude': lambda f: f is None }}) - np: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NP'), 'exclude': lambda f: f is None }}) - nl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NL'), 'exclude': lambda f: f is None }}) - nc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NC'), 'exclude': lambda f: f is None }}) - nz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NZ'), 'exclude': lambda f: f is None }}) 
- ni: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NI'), 'exclude': lambda f: f is None }}) - ne: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NE'), 'exclude': lambda f: f is None }}) - ng: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NG'), 'exclude': lambda f: f is None }}) - nu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NU'), 'exclude': lambda f: f is None }}) - nf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NF'), 'exclude': lambda f: f is None }}) - mk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MK'), 'exclude': lambda f: f is None }}) - mp: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MP'), 'exclude': lambda f: f is None }}) - no: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('NO'), 'exclude': lambda f: f is None }}) - om: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('OM'), 'exclude': lambda f: f is None }}) - pk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PK'), 'exclude': lambda f: f is None }}) - pw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PW'), 'exclude': lambda f: f is None }}) - ps: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PS'), 'exclude': lambda f: f is None }}) - pa: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PA'), 'exclude': lambda f: f is None }}) - pg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PG'), 'exclude': lambda f: f is None }}) - py: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PY'), 'exclude': lambda f: f is None }}) - pe: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PE'), 'exclude': lambda f: f is None }}) - ph: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PH'), 'exclude': lambda f: f is None }}) - pn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PN'), 'exclude': lambda f: f is None }}) - pl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PL'), 'exclude': lambda f: f is None }}) - pt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PT'), 'exclude': lambda f: f is None }}) - pr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PR'), 'exclude': lambda f: f is None }}) - qa: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('QA'), 'exclude': lambda f: f is None }}) - re: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RE'), 'exclude': lambda f: f is None }}) - ro: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RO'), 'exclude': lambda f: f is None }}) - ru: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RU'), 'exclude': lambda f: f is None }}) - rw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RW'), 'exclude': lambda f: f is None }}) - sh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SH'), 'exclude': lambda f: f is None }}) - kn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('KN'), 'exclude': lambda f: f is None }}) - lc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LC'), 'exclude': lambda f: f is None }}) - pm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('PM'), 'exclude': lambda f: f is None }}) - vc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VC'), 'exclude': lambda f: f is None }}) - ws: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('WS'), 'exclude': lambda f: f is None }}) - sm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SM'), 'exclude': lambda f: f is None }}) - st: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ST'), 'exclude': lambda f: f is None }}) - sa: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SA'), 'exclude': lambda f: f is None }}) - sn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SN'), 'exclude': lambda f: f is None }}) - sc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SC'), 'exclude': lambda f: f is None }}) - sl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SL'), 'exclude': lambda f: f is None }}) - sg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SG'), 'exclude': lambda f: f is None }}) - sk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SK'), 'exclude': lambda f: f is None }}) - si: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SI'), 'exclude': lambda f: f is None }}) - sb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SB'), 'exclude': lambda f: f is None }}) - so: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SO'), 'exclude': lambda f: f is None }}) - za: Optional[str] = 
dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ZA'), 'exclude': lambda f: f is None }}) - gs: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GS'), 'exclude': lambda f: f is None }}) - es: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ES'), 'exclude': lambda f: f is None }}) - lk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('LK'), 'exclude': lambda f: f is None }}) - sd: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SD'), 'exclude': lambda f: f is None }}) - sr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SR'), 'exclude': lambda f: f is None }}) - sj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SJ'), 'exclude': lambda f: f is None }}) - sz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SZ'), 'exclude': lambda f: f is None }}) - se: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SE'), 'exclude': lambda f: f is None }}) - ch: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CH'), 'exclude': lambda f: f is None }}) - sy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SY'), 'exclude': lambda f: f is None }}) - tw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TW'), 'exclude': lambda f: f is None }}) - tj: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TJ'), 'exclude': lambda f: f is None }}) - tz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TZ'), 'exclude': lambda f: f is None }}) - th: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TH'), 'exclude': lambda f: f is None }}) - tl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TL'), 'exclude': lambda f: f is None }}) - tg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TG'), 'exclude': lambda f: f is None }}) - tk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TK'), 'exclude': lambda f: f is None }}) - to: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TO'), 'exclude': lambda f: f is None }}) - tt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TT'), 'exclude': lambda f: f is None }}) - tn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TN'), 'exclude': lambda f: f is None }}) - tr: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TR'), 
'exclude': lambda f: f is None }}) - tm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TM'), 'exclude': lambda f: f is None }}) - tc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TC'), 'exclude': lambda f: f is None }}) - tv: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('TV'), 'exclude': lambda f: f is None }}) - ug: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UG'), 'exclude': lambda f: f is None }}) - ua: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UA'), 'exclude': lambda f: f is None }}) - ae: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AE'), 'exclude': lambda f: f is None }}) - gb: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GB'), 'exclude': lambda f: f is None }}) - us: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('US'), 'exclude': lambda f: f is None }}) - um: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UM'), 'exclude': lambda f: f is None }}) - uy: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UY'), 'exclude': lambda f: f is None }}) - uz: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('UZ'), 'exclude': lambda f: f is None }}) - vu: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VU'), 'exclude': lambda f: f is None }}) - ve: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VE'), 'exclude': lambda f: f is None }}) - vn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VN'), 'exclude': lambda f: f is None }}) - vg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VG'), 'exclude': lambda f: f is None }}) - vi: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('VI'), 'exclude': lambda f: f is None }}) - wf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('WF'), 'exclude': lambda f: f is None }}) - eh: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('EH'), 'exclude': lambda f: f is None }}) - ye: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('YE'), 'exclude': lambda f: f is None }}) - zm: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ZM'), 'exclude': lambda f: f is None }}) - zw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ZW'), 'exclude': lambda f: f is None }}) - ax: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('AX'), 'exclude': lambda f: f is None }}) - bq: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BQ'), 'exclude': lambda f: f is None }}) - cw: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('CW'), 'exclude': lambda f: f is None }}) - gg: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('GG'), 'exclude': lambda f: f is None }}) - im: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('IM'), 'exclude': lambda f: f is None }}) - je: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('JE'), 'exclude': lambda f: f is None }}) - me: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ME'), 'exclude': lambda f: f is None }}) - bl: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('BL'), 'exclude': lambda f: f is None }}) - mf: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('MF'), 'exclude': lambda f: f is None }}) - rs: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('RS'), 'exclude': lambda f: f is None }}) - sx: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SX'), 'exclude': lambda f: f is None }}) - ss: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('SS'), 'exclude': lambda f: f is None }}) - xk: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('XK'), 'exclude': lambda f: f is None }}) + + af: NotRequired[str] + al: NotRequired[str] + dz: NotRequired[str] + as_: NotRequired[str] + ad: NotRequired[str] + ao: NotRequired[str] + ai: NotRequired[str] + aq: NotRequired[str] + ag: NotRequired[str] + ar: NotRequired[str] + am: NotRequired[str] + aw: NotRequired[str] + au: NotRequired[str] + at: NotRequired[str] + az: NotRequired[str] + bs: NotRequired[str] + bh: NotRequired[str] + bd: NotRequired[str] + bb: NotRequired[str] + by: NotRequired[str] + be: NotRequired[str] + bz: NotRequired[str] + bj: NotRequired[str] + bm: NotRequired[str] + bt: NotRequired[str] + bo: NotRequired[str] + ba: NotRequired[str] + bw: NotRequired[str] + bv: NotRequired[str] + br: NotRequired[str] + io: NotRequired[str] + bn: NotRequired[str] + bg: NotRequired[str] + bf: NotRequired[str] + bi: NotRequired[str] + kh: NotRequired[str] + cm: NotRequired[str] + ca: NotRequired[str] + cv: NotRequired[str] + ky: NotRequired[str] + cf: NotRequired[str] + td: NotRequired[str] + cl: NotRequired[str] + cn: NotRequired[str] + cx: NotRequired[str] + cc: NotRequired[str] + co: NotRequired[str] + km: NotRequired[str] + cg: NotRequired[str] + cd: NotRequired[str] + ck: NotRequired[str] + cr: NotRequired[str] + ci: NotRequired[str] + hr: NotRequired[str] + cu: NotRequired[str] + cy: NotRequired[str] + cz: NotRequired[str] + dk: NotRequired[str] + dj: NotRequired[str] + dm: NotRequired[str] + do: NotRequired[str] + ec: NotRequired[str] + eg: NotRequired[str] + sv: NotRequired[str] + 
gq: NotRequired[str] + er: NotRequired[str] + ee: NotRequired[str] + et: NotRequired[str] + fk: NotRequired[str] + fo: NotRequired[str] + fj: NotRequired[str] + fi: NotRequired[str] + fr: NotRequired[str] + gf: NotRequired[str] + pf: NotRequired[str] + tf: NotRequired[str] + ga: NotRequired[str] + gm: NotRequired[str] + ge: NotRequired[str] + de: NotRequired[str] + gh: NotRequired[str] + gi: NotRequired[str] + gr: NotRequired[str] + gl: NotRequired[str] + gd: NotRequired[str] + gp: NotRequired[str] + gu: NotRequired[str] + gt: NotRequired[str] + gn: NotRequired[str] + gw: NotRequired[str] + gy: NotRequired[str] + ht: NotRequired[str] + hm: NotRequired[str] + va: NotRequired[str] + hn: NotRequired[str] + hk: NotRequired[str] + hu: NotRequired[str] + is_: NotRequired[str] + in_: NotRequired[str] + id: NotRequired[str] + ir: NotRequired[str] + iq: NotRequired[str] + ie: NotRequired[str] + il: NotRequired[str] + it: NotRequired[str] + jm: NotRequired[str] + jp: NotRequired[str] + jo: NotRequired[str] + kz: NotRequired[str] + ke: NotRequired[str] + ki: NotRequired[str] + kp: NotRequired[str] + kr: NotRequired[str] + kw: NotRequired[str] + kg: NotRequired[str] + la: NotRequired[str] + lv: NotRequired[str] + lb: NotRequired[str] + ls: NotRequired[str] + lr: NotRequired[str] + ly: NotRequired[str] + li: NotRequired[str] + lt: NotRequired[str] + lu: NotRequired[str] + mo: NotRequired[str] + mg: NotRequired[str] + mw: NotRequired[str] + my: NotRequired[str] + mv: NotRequired[str] + ml: NotRequired[str] + mt: NotRequired[str] + mh: NotRequired[str] + mq: NotRequired[str] + mr: NotRequired[str] + mu: NotRequired[str] + yt: NotRequired[str] + mx: NotRequired[str] + fm: NotRequired[str] + md: NotRequired[str] + mc: NotRequired[str] + mn: NotRequired[str] + ms: NotRequired[str] + ma: NotRequired[str] + mz: NotRequired[str] + mm: NotRequired[str] + na: NotRequired[str] + nr: NotRequired[str] + np: NotRequired[str] + nl: NotRequired[str] + nc: NotRequired[str] + nz: NotRequired[str] + ni: NotRequired[str] + ne: NotRequired[str] + ng: NotRequired[str] + nu: NotRequired[str] + nf: NotRequired[str] + mk: NotRequired[str] + mp: NotRequired[str] + no: NotRequired[str] + om: NotRequired[str] + pk: NotRequired[str] + pw: NotRequired[str] + ps: NotRequired[str] + pa: NotRequired[str] + pg: NotRequired[str] + py: NotRequired[str] + pe: NotRequired[str] + ph: NotRequired[str] + pn: NotRequired[str] + pl: NotRequired[str] + pt: NotRequired[str] + pr: NotRequired[str] + qa: NotRequired[str] + re: NotRequired[str] + ro: NotRequired[str] + ru: NotRequired[str] + rw: NotRequired[str] + sh: NotRequired[str] + kn: NotRequired[str] + lc: NotRequired[str] + pm: NotRequired[str] + vc: NotRequired[str] + ws: NotRequired[str] + sm: NotRequired[str] + st: NotRequired[str] + sa: NotRequired[str] + sn: NotRequired[str] + sc: NotRequired[str] + sl: NotRequired[str] + sg: NotRequired[str] + sk: NotRequired[str] + si: NotRequired[str] + sb: NotRequired[str] + so: NotRequired[str] + za: NotRequired[str] + gs: NotRequired[str] + es: NotRequired[str] + lk: NotRequired[str] + sd: NotRequired[str] + sr: NotRequired[str] + sj: NotRequired[str] + sz: NotRequired[str] + se: NotRequired[str] + ch: NotRequired[str] + sy: NotRequired[str] + tw: NotRequired[str] + tj: NotRequired[str] + tz: NotRequired[str] + th: NotRequired[str] + tl: NotRequired[str] + tg: NotRequired[str] + tk: NotRequired[str] + to: NotRequired[str] + tt: NotRequired[str] + tn: NotRequired[str] + tr: NotRequired[str] + tm: NotRequired[str] + tc: NotRequired[str] + tv: 
NotRequired[str] + ug: NotRequired[str] + ua: NotRequired[str] + ae: NotRequired[str] + gb: NotRequired[str] + us: NotRequired[str] + um: NotRequired[str] + uy: NotRequired[str] + uz: NotRequired[str] + vu: NotRequired[str] + ve: NotRequired[str] + vn: NotRequired[str] + vg: NotRequired[str] + vi: NotRequired[str] + wf: NotRequired[str] + eh: NotRequired[str] + ye: NotRequired[str] + zm: NotRequired[str] + zw: NotRequired[str] + ax: NotRequired[str] + bq: NotRequired[str] + cw: NotRequired[str] + gg: NotRequired[str] + im: NotRequired[str] + je: NotRequired[str] + me: NotRequired[str] + bl: NotRequired[str] + mf: NotRequired[str] + rs: NotRequired[str] + sx: NotRequired[str] + ss: NotRequired[str] + xk: NotRequired[str] +class Geo(BaseModel): + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. Learn more: https://d.to/geo""" + + af: Annotated[Optional[str], pydantic.Field(alias="AF")] = None + al: Annotated[Optional[str], pydantic.Field(alias="AL")] = None + dz: Annotated[Optional[str], pydantic.Field(alias="DZ")] = None + as_: Annotated[Optional[str], pydantic.Field(alias="AS")] = None + ad: Annotated[Optional[str], pydantic.Field(alias="AD")] = None + ao: Annotated[Optional[str], pydantic.Field(alias="AO")] = None + ai: Annotated[Optional[str], pydantic.Field(alias="AI")] = None + aq: Annotated[Optional[str], pydantic.Field(alias="AQ")] = None + ag: Annotated[Optional[str], pydantic.Field(alias="AG")] = None + ar: Annotated[Optional[str], pydantic.Field(alias="AR")] = None + am: Annotated[Optional[str], pydantic.Field(alias="AM")] = None + aw: Annotated[Optional[str], pydantic.Field(alias="AW")] = None + au: Annotated[Optional[str], pydantic.Field(alias="AU")] = None + at: Annotated[Optional[str], pydantic.Field(alias="AT")] = None + az: Annotated[Optional[str], pydantic.Field(alias="AZ")] = None + bs: Annotated[Optional[str], pydantic.Field(alias="BS")] = None + bh: Annotated[Optional[str], pydantic.Field(alias="BH")] = None + bd: Annotated[Optional[str], pydantic.Field(alias="BD")] = None + bb: Annotated[Optional[str], pydantic.Field(alias="BB")] = None + by: Annotated[Optional[str], pydantic.Field(alias="BY")] = None + be: Annotated[Optional[str], pydantic.Field(alias="BE")] = None + bz: Annotated[Optional[str], pydantic.Field(alias="BZ")] = None + bj: Annotated[Optional[str], pydantic.Field(alias="BJ")] = None + bm: Annotated[Optional[str], pydantic.Field(alias="BM")] = None + bt: Annotated[Optional[str], pydantic.Field(alias="BT")] = None + bo: Annotated[Optional[str], pydantic.Field(alias="BO")] = None + ba: Annotated[Optional[str], pydantic.Field(alias="BA")] = None + bw: Annotated[Optional[str], pydantic.Field(alias="BW")] = None + bv: Annotated[Optional[str], pydantic.Field(alias="BV")] = None + br: Annotated[Optional[str], pydantic.Field(alias="BR")] = None + io: Annotated[Optional[str], pydantic.Field(alias="IO")] = None + bn: Annotated[Optional[str], pydantic.Field(alias="BN")] = None + bg: Annotated[Optional[str], pydantic.Field(alias="BG")] = None + bf: Annotated[Optional[str], pydantic.Field(alias="BF")] = None + bi: Annotated[Optional[str], pydantic.Field(alias="BI")] = None + kh: Annotated[Optional[str], pydantic.Field(alias="KH")] = None + cm: Annotated[Optional[str], pydantic.Field(alias="CM")] = None + ca: Annotated[Optional[str], pydantic.Field(alias="CA")] = None + cv: Annotated[Optional[str], pydantic.Field(alias="CV")] = None + ky: Annotated[Optional[str], pydantic.Field(alias="KY")] = None + cf: 
Annotated[Optional[str], pydantic.Field(alias="CF")] = None + td: Annotated[Optional[str], pydantic.Field(alias="TD")] = None + cl: Annotated[Optional[str], pydantic.Field(alias="CL")] = None + cn: Annotated[Optional[str], pydantic.Field(alias="CN")] = None + cx: Annotated[Optional[str], pydantic.Field(alias="CX")] = None + cc: Annotated[Optional[str], pydantic.Field(alias="CC")] = None + co: Annotated[Optional[str], pydantic.Field(alias="CO")] = None + km: Annotated[Optional[str], pydantic.Field(alias="KM")] = None + cg: Annotated[Optional[str], pydantic.Field(alias="CG")] = None + cd: Annotated[Optional[str], pydantic.Field(alias="CD")] = None + ck: Annotated[Optional[str], pydantic.Field(alias="CK")] = None + cr: Annotated[Optional[str], pydantic.Field(alias="CR")] = None + ci: Annotated[Optional[str], pydantic.Field(alias="CI")] = None + hr: Annotated[Optional[str], pydantic.Field(alias="HR")] = None + cu: Annotated[Optional[str], pydantic.Field(alias="CU")] = None + cy: Annotated[Optional[str], pydantic.Field(alias="CY")] = None + cz: Annotated[Optional[str], pydantic.Field(alias="CZ")] = None + dk: Annotated[Optional[str], pydantic.Field(alias="DK")] = None + dj: Annotated[Optional[str], pydantic.Field(alias="DJ")] = None + dm: Annotated[Optional[str], pydantic.Field(alias="DM")] = None + do: Annotated[Optional[str], pydantic.Field(alias="DO")] = None + ec: Annotated[Optional[str], pydantic.Field(alias="EC")] = None + eg: Annotated[Optional[str], pydantic.Field(alias="EG")] = None + sv: Annotated[Optional[str], pydantic.Field(alias="SV")] = None + gq: Annotated[Optional[str], pydantic.Field(alias="GQ")] = None + er: Annotated[Optional[str], pydantic.Field(alias="ER")] = None + ee: Annotated[Optional[str], pydantic.Field(alias="EE")] = None + et: Annotated[Optional[str], pydantic.Field(alias="ET")] = None + fk: Annotated[Optional[str], pydantic.Field(alias="FK")] = None + fo: Annotated[Optional[str], pydantic.Field(alias="FO")] = None + fj: Annotated[Optional[str], pydantic.Field(alias="FJ")] = None + fi: Annotated[Optional[str], pydantic.Field(alias="FI")] = None + fr: Annotated[Optional[str], pydantic.Field(alias="FR")] = None + gf: Annotated[Optional[str], pydantic.Field(alias="GF")] = None + pf: Annotated[Optional[str], pydantic.Field(alias="PF")] = None + tf: Annotated[Optional[str], pydantic.Field(alias="TF")] = None + ga: Annotated[Optional[str], pydantic.Field(alias="GA")] = None + gm: Annotated[Optional[str], pydantic.Field(alias="GM")] = None + ge: Annotated[Optional[str], pydantic.Field(alias="GE")] = None + de: Annotated[Optional[str], pydantic.Field(alias="DE")] = None + gh: Annotated[Optional[str], pydantic.Field(alias="GH")] = None + gi: Annotated[Optional[str], pydantic.Field(alias="GI")] = None + gr: Annotated[Optional[str], pydantic.Field(alias="GR")] = None + gl: Annotated[Optional[str], pydantic.Field(alias="GL")] = None + gd: Annotated[Optional[str], pydantic.Field(alias="GD")] = None + gp: Annotated[Optional[str], pydantic.Field(alias="GP")] = None + gu: Annotated[Optional[str], pydantic.Field(alias="GU")] = None + gt: Annotated[Optional[str], pydantic.Field(alias="GT")] = None + gn: Annotated[Optional[str], pydantic.Field(alias="GN")] = None + gw: Annotated[Optional[str], pydantic.Field(alias="GW")] = None + gy: Annotated[Optional[str], pydantic.Field(alias="GY")] = None + ht: Annotated[Optional[str], pydantic.Field(alias="HT")] = None + hm: Annotated[Optional[str], pydantic.Field(alias="HM")] = None + va: Annotated[Optional[str], pydantic.Field(alias="VA")] = 
None + hn: Annotated[Optional[str], pydantic.Field(alias="HN")] = None + hk: Annotated[Optional[str], pydantic.Field(alias="HK")] = None + hu: Annotated[Optional[str], pydantic.Field(alias="HU")] = None + is_: Annotated[Optional[str], pydantic.Field(alias="IS")] = None + in_: Annotated[Optional[str], pydantic.Field(alias="IN")] = None + id: Annotated[Optional[str], pydantic.Field(alias="ID")] = None + ir: Annotated[Optional[str], pydantic.Field(alias="IR")] = None + iq: Annotated[Optional[str], pydantic.Field(alias="IQ")] = None + ie: Annotated[Optional[str], pydantic.Field(alias="IE")] = None + il: Annotated[Optional[str], pydantic.Field(alias="IL")] = None + it: Annotated[Optional[str], pydantic.Field(alias="IT")] = None + jm: Annotated[Optional[str], pydantic.Field(alias="JM")] = None + jp: Annotated[Optional[str], pydantic.Field(alias="JP")] = None + jo: Annotated[Optional[str], pydantic.Field(alias="JO")] = None + kz: Annotated[Optional[str], pydantic.Field(alias="KZ")] = None + ke: Annotated[Optional[str], pydantic.Field(alias="KE")] = None + ki: Annotated[Optional[str], pydantic.Field(alias="KI")] = None + kp: Annotated[Optional[str], pydantic.Field(alias="KP")] = None + kr: Annotated[Optional[str], pydantic.Field(alias="KR")] = None + kw: Annotated[Optional[str], pydantic.Field(alias="KW")] = None + kg: Annotated[Optional[str], pydantic.Field(alias="KG")] = None + la: Annotated[Optional[str], pydantic.Field(alias="LA")] = None + lv: Annotated[Optional[str], pydantic.Field(alias="LV")] = None + lb: Annotated[Optional[str], pydantic.Field(alias="LB")] = None + ls: Annotated[Optional[str], pydantic.Field(alias="LS")] = None + lr: Annotated[Optional[str], pydantic.Field(alias="LR")] = None + ly: Annotated[Optional[str], pydantic.Field(alias="LY")] = None + li: Annotated[Optional[str], pydantic.Field(alias="LI")] = None + lt: Annotated[Optional[str], pydantic.Field(alias="LT")] = None + lu: Annotated[Optional[str], pydantic.Field(alias="LU")] = None + mo: Annotated[Optional[str], pydantic.Field(alias="MO")] = None + mg: Annotated[Optional[str], pydantic.Field(alias="MG")] = None + mw: Annotated[Optional[str], pydantic.Field(alias="MW")] = None + my: Annotated[Optional[str], pydantic.Field(alias="MY")] = None + mv: Annotated[Optional[str], pydantic.Field(alias="MV")] = None + ml: Annotated[Optional[str], pydantic.Field(alias="ML")] = None + mt: Annotated[Optional[str], pydantic.Field(alias="MT")] = None + mh: Annotated[Optional[str], pydantic.Field(alias="MH")] = None + mq: Annotated[Optional[str], pydantic.Field(alias="MQ")] = None + mr: Annotated[Optional[str], pydantic.Field(alias="MR")] = None + mu: Annotated[Optional[str], pydantic.Field(alias="MU")] = None + yt: Annotated[Optional[str], pydantic.Field(alias="YT")] = None + mx: Annotated[Optional[str], pydantic.Field(alias="MX")] = None + fm: Annotated[Optional[str], pydantic.Field(alias="FM")] = None + md: Annotated[Optional[str], pydantic.Field(alias="MD")] = None + mc: Annotated[Optional[str], pydantic.Field(alias="MC")] = None + mn: Annotated[Optional[str], pydantic.Field(alias="MN")] = None + ms: Annotated[Optional[str], pydantic.Field(alias="MS")] = None + ma: Annotated[Optional[str], pydantic.Field(alias="MA")] = None + mz: Annotated[Optional[str], pydantic.Field(alias="MZ")] = None + mm: Annotated[Optional[str], pydantic.Field(alias="MM")] = None + na: Annotated[Optional[str], pydantic.Field(alias="NA")] = None + nr: Annotated[Optional[str], pydantic.Field(alias="NR")] = None + np: Annotated[Optional[str], 
pydantic.Field(alias="NP")] = None + nl: Annotated[Optional[str], pydantic.Field(alias="NL")] = None + nc: Annotated[Optional[str], pydantic.Field(alias="NC")] = None + nz: Annotated[Optional[str], pydantic.Field(alias="NZ")] = None + ni: Annotated[Optional[str], pydantic.Field(alias="NI")] = None + ne: Annotated[Optional[str], pydantic.Field(alias="NE")] = None + ng: Annotated[Optional[str], pydantic.Field(alias="NG")] = None + nu: Annotated[Optional[str], pydantic.Field(alias="NU")] = None + nf: Annotated[Optional[str], pydantic.Field(alias="NF")] = None + mk: Annotated[Optional[str], pydantic.Field(alias="MK")] = None + mp: Annotated[Optional[str], pydantic.Field(alias="MP")] = None + no: Annotated[Optional[str], pydantic.Field(alias="NO")] = None + om: Annotated[Optional[str], pydantic.Field(alias="OM")] = None + pk: Annotated[Optional[str], pydantic.Field(alias="PK")] = None + pw: Annotated[Optional[str], pydantic.Field(alias="PW")] = None + ps: Annotated[Optional[str], pydantic.Field(alias="PS")] = None + pa: Annotated[Optional[str], pydantic.Field(alias="PA")] = None + pg: Annotated[Optional[str], pydantic.Field(alias="PG")] = None + py: Annotated[Optional[str], pydantic.Field(alias="PY")] = None + pe: Annotated[Optional[str], pydantic.Field(alias="PE")] = None + ph: Annotated[Optional[str], pydantic.Field(alias="PH")] = None + pn: Annotated[Optional[str], pydantic.Field(alias="PN")] = None + pl: Annotated[Optional[str], pydantic.Field(alias="PL")] = None + pt: Annotated[Optional[str], pydantic.Field(alias="PT")] = None + pr: Annotated[Optional[str], pydantic.Field(alias="PR")] = None + qa: Annotated[Optional[str], pydantic.Field(alias="QA")] = None + re: Annotated[Optional[str], pydantic.Field(alias="RE")] = None + ro: Annotated[Optional[str], pydantic.Field(alias="RO")] = None + ru: Annotated[Optional[str], pydantic.Field(alias="RU")] = None + rw: Annotated[Optional[str], pydantic.Field(alias="RW")] = None + sh: Annotated[Optional[str], pydantic.Field(alias="SH")] = None + kn: Annotated[Optional[str], pydantic.Field(alias="KN")] = None + lc: Annotated[Optional[str], pydantic.Field(alias="LC")] = None + pm: Annotated[Optional[str], pydantic.Field(alias="PM")] = None + vc: Annotated[Optional[str], pydantic.Field(alias="VC")] = None + ws: Annotated[Optional[str], pydantic.Field(alias="WS")] = None + sm: Annotated[Optional[str], pydantic.Field(alias="SM")] = None + st: Annotated[Optional[str], pydantic.Field(alias="ST")] = None + sa: Annotated[Optional[str], pydantic.Field(alias="SA")] = None + sn: Annotated[Optional[str], pydantic.Field(alias="SN")] = None + sc: Annotated[Optional[str], pydantic.Field(alias="SC")] = None + sl: Annotated[Optional[str], pydantic.Field(alias="SL")] = None + sg: Annotated[Optional[str], pydantic.Field(alias="SG")] = None + sk: Annotated[Optional[str], pydantic.Field(alias="SK")] = None + si: Annotated[Optional[str], pydantic.Field(alias="SI")] = None + sb: Annotated[Optional[str], pydantic.Field(alias="SB")] = None + so: Annotated[Optional[str], pydantic.Field(alias="SO")] = None + za: Annotated[Optional[str], pydantic.Field(alias="ZA")] = None + gs: Annotated[Optional[str], pydantic.Field(alias="GS")] = None + es: Annotated[Optional[str], pydantic.Field(alias="ES")] = None + lk: Annotated[Optional[str], pydantic.Field(alias="LK")] = None + sd: Annotated[Optional[str], pydantic.Field(alias="SD")] = None + sr: Annotated[Optional[str], pydantic.Field(alias="SR")] = None + sj: Annotated[Optional[str], pydantic.Field(alias="SJ")] = None + sz: 
Annotated[Optional[str], pydantic.Field(alias="SZ")] = None + se: Annotated[Optional[str], pydantic.Field(alias="SE")] = None + ch: Annotated[Optional[str], pydantic.Field(alias="CH")] = None + sy: Annotated[Optional[str], pydantic.Field(alias="SY")] = None + tw: Annotated[Optional[str], pydantic.Field(alias="TW")] = None + tj: Annotated[Optional[str], pydantic.Field(alias="TJ")] = None + tz: Annotated[Optional[str], pydantic.Field(alias="TZ")] = None + th: Annotated[Optional[str], pydantic.Field(alias="TH")] = None + tl: Annotated[Optional[str], pydantic.Field(alias="TL")] = None + tg: Annotated[Optional[str], pydantic.Field(alias="TG")] = None + tk: Annotated[Optional[str], pydantic.Field(alias="TK")] = None + to: Annotated[Optional[str], pydantic.Field(alias="TO")] = None + tt: Annotated[Optional[str], pydantic.Field(alias="TT")] = None + tn: Annotated[Optional[str], pydantic.Field(alias="TN")] = None + tr: Annotated[Optional[str], pydantic.Field(alias="TR")] = None + tm: Annotated[Optional[str], pydantic.Field(alias="TM")] = None + tc: Annotated[Optional[str], pydantic.Field(alias="TC")] = None + tv: Annotated[Optional[str], pydantic.Field(alias="TV")] = None + ug: Annotated[Optional[str], pydantic.Field(alias="UG")] = None + ua: Annotated[Optional[str], pydantic.Field(alias="UA")] = None + ae: Annotated[Optional[str], pydantic.Field(alias="AE")] = None + gb: Annotated[Optional[str], pydantic.Field(alias="GB")] = None + us: Annotated[Optional[str], pydantic.Field(alias="US")] = None + um: Annotated[Optional[str], pydantic.Field(alias="UM")] = None + uy: Annotated[Optional[str], pydantic.Field(alias="UY")] = None + uz: Annotated[Optional[str], pydantic.Field(alias="UZ")] = None + vu: Annotated[Optional[str], pydantic.Field(alias="VU")] = None + ve: Annotated[Optional[str], pydantic.Field(alias="VE")] = None + vn: Annotated[Optional[str], pydantic.Field(alias="VN")] = None + vg: Annotated[Optional[str], pydantic.Field(alias="VG")] = None + vi: Annotated[Optional[str], pydantic.Field(alias="VI")] = None + wf: Annotated[Optional[str], pydantic.Field(alias="WF")] = None + eh: Annotated[Optional[str], pydantic.Field(alias="EH")] = None + ye: Annotated[Optional[str], pydantic.Field(alias="YE")] = None + zm: Annotated[Optional[str], pydantic.Field(alias="ZM")] = None + zw: Annotated[Optional[str], pydantic.Field(alias="ZW")] = None + ax: Annotated[Optional[str], pydantic.Field(alias="AX")] = None + bq: Annotated[Optional[str], pydantic.Field(alias="BQ")] = None + cw: Annotated[Optional[str], pydantic.Field(alias="CW")] = None + gg: Annotated[Optional[str], pydantic.Field(alias="GG")] = None + im: Annotated[Optional[str], pydantic.Field(alias="IM")] = None + je: Annotated[Optional[str], pydantic.Field(alias="JE")] = None + me: Annotated[Optional[str], pydantic.Field(alias="ME")] = None + bl: Annotated[Optional[str], pydantic.Field(alias="BL")] = None + mf: Annotated[Optional[str], pydantic.Field(alias="MF")] = None + rs: Annotated[Optional[str], pydantic.Field(alias="RS")] = None + sx: Annotated[Optional[str], pydantic.Field(alias="SX")] = None + ss: Annotated[Optional[str], pydantic.Field(alias="SS")] = None + xk: Annotated[Optional[str], pydantic.Field(alias="XK")] = None + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class LinkSchema: - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class LinkSchemaTypedDict(TypedDict): + id: str r"""The unique ID of the short link.""" - domain: str = 
dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) + domain: str r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" - key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }}) + key: str r"""The short link slug. If not provided, a random 7-character slug will be generated.""" - external_id: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('externalId') }}) + external_id: Nullable[str] r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with 'ext_' when passed as a query parameter.""" - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) + url: str r"""The destination URL of the short link.""" - expires_at: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiresAt') }}) + expires_at: Nullable[str] r"""The date and time when the short link will expire in ISO-8601 format.""" - expired_url: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl') }}) + expired_url: Nullable[str] r"""The URL to redirect to when the short link has expired.""" - password: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + password: Nullable[str] r"""The password required to access the destination URL of the short link.""" - title: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title') }}) + title: Nullable[str] r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - description: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description') }}) + description: Nullable[str] r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - image: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image') }}) + image: Nullable[str] r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - ios: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ios') }}) + ios: Nullable[str] r"""The iOS destination URL for the short link for iOS device targeting.""" - android: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('android') }}) + android: Nullable[str] r"""The Android destination URL for the short link for Android device targeting.""" - geo: Optional[Geo] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo') }}) + geo: Nullable[GeoTypedDict] r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. Learn more: https://d.to/geo""" - tag_id: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId') }}) - r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tags` instead. 
- - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ - tags: Optional[List[TagSchema]] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tags') }}) + tag_id: Nullable[str] + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tags` instead.""" + tags: Nullable[List[TagSchemaTypedDict]] r"""The tags assigned to the short link.""" - comments: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comments') }}) + comments: Nullable[str] r"""The comments for the short link.""" - short_link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shortLink') }}) + short_link: str r"""The full URL of the short link, including the https protocol (e.g. `https://dub.sh/try`).""" - qr_code: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('qrCode') }}) + qr_code: str r"""The full URL of the QR code for the short link (e.g. `https://api.dub.co/qr?url=https://dub.sh/try`).""" - utm_source: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('utm_source') }}) + utm_source: Nullable[str] r"""The UTM source of the short link.""" - utm_medium: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('utm_medium') }}) + utm_medium: Nullable[str] r"""The UTM medium of the short link.""" - utm_campaign: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('utm_campaign') }}) + utm_campaign: Nullable[str] r"""The UTM campaign of the short link.""" - utm_term: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('utm_term') }}) + utm_term: Nullable[str] r"""The UTM term of the short link.""" - utm_content: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('utm_content') }}) + utm_content: Nullable[str] r"""The UTM content of the short link.""" - user_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('userId') }}) + user_id: str r"""The user ID of the creator of the short link.""" - workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }}) + workspace_id: str r"""The workspace ID of the short link.""" - last_clicked: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lastClicked') }}) + last_clicked: Nullable[str] r"""The date and time when the short link was last clicked.""" - created_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('createdAt') }}) + created_at: str r"""The date and time when the short link was created.""" - updated_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('updatedAt') }}) + updated_at: str r"""The date and time when the short link was last updated.""" - project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('projectId') }}) - r"""The project ID of the short link. This field is deprecated – use `workspaceId` instead. + project_id: str + r"""The project ID of the short link. 
This field is deprecated – use `workspaceId` instead.""" + track_conversion: NotRequired[bool] + r"""[BETA] Whether to track conversions for the short link.""" + archived: NotRequired[bool] + r"""Whether the short link is archived.""" + proxy: NotRequired[bool] + r"""Whether the short link uses Custom Social Media Cards feature.""" + rewrite: NotRequired[bool] + r"""Whether the short link uses link cloaking.""" + do_index: NotRequired[bool] + r"""Whether to allow search engines to index the short link.""" + public_stats: NotRequired[bool] + r"""Whether the short link's stats are publicly accessible.""" + clicks: NotRequired[float] + r"""The number of clicks on the short link.""" + leads: NotRequired[float] + r"""[BETA]: The number of leads the short links has generated.""" + sales: NotRequired[float] + r"""[BETA]: The number of sales the short links has generated.""" + - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ - track_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trackConversion'), 'exclude': lambda f: f is None }}) +class LinkSchema(BaseModel): + id: str + r"""The unique ID of the short link.""" + domain: str + r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" + key: str + r"""The short link slug. If not provided, a random 7-character slug will be generated.""" + external_id: Annotated[Nullable[str], pydantic.Field(alias="externalId")] + r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with 'ext_' when passed as a query parameter.""" + url: str + r"""The destination URL of the short link.""" + expires_at: Annotated[Nullable[str], pydantic.Field(alias="expiresAt")] + r"""The date and time when the short link will expire in ISO-8601 format.""" + expired_url: Annotated[Nullable[str], pydantic.Field(alias="expiredUrl")] + r"""The URL to redirect to when the short link has expired.""" + password: Nullable[str] + r"""The password required to access the destination URL of the short link.""" + title: Nullable[str] + r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + description: Nullable[str] + r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + image: Nullable[str] + r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + ios: Nullable[str] + r"""The iOS destination URL for the short link for iOS device targeting.""" + android: Nullable[str] + r"""The Android destination URL for the short link for Android device targeting.""" + geo: Nullable[Geo] + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`. Learn more: https://d.to/geo""" + tag_id: Annotated[Nullable[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="tagId")] + r"""The unique ID of the tag assigned to the short link. 
This field is deprecated – use `tags` instead.""" + tags: Nullable[List[TagSchema]] + r"""The tags assigned to the short link.""" + comments: Nullable[str] + r"""The comments for the short link.""" + short_link: Annotated[str, pydantic.Field(alias="shortLink")] + r"""The full URL of the short link, including the https protocol (e.g. `https://dub.sh/try`).""" + qr_code: Annotated[str, pydantic.Field(alias="qrCode")] + r"""The full URL of the QR code for the short link (e.g. `https://api.dub.co/qr?url=https://dub.sh/try`).""" + utm_source: Nullable[str] + r"""The UTM source of the short link.""" + utm_medium: Nullable[str] + r"""The UTM medium of the short link.""" + utm_campaign: Nullable[str] + r"""The UTM campaign of the short link.""" + utm_term: Nullable[str] + r"""The UTM term of the short link.""" + utm_content: Nullable[str] + r"""The UTM content of the short link.""" + user_id: Annotated[str, pydantic.Field(alias="userId")] + r"""The user ID of the creator of the short link.""" + workspace_id: Annotated[str, pydantic.Field(alias="workspaceId")] + r"""The workspace ID of the short link.""" + last_clicked: Annotated[Nullable[str], pydantic.Field(alias="lastClicked")] + r"""The date and time when the short link was last clicked.""" + created_at: Annotated[str, pydantic.Field(alias="createdAt")] + r"""The date and time when the short link was created.""" + updated_at: Annotated[str, pydantic.Field(alias="updatedAt")] + r"""The date and time when the short link was last updated.""" + project_id: Annotated[str, pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="projectId")] + r"""The project ID of the short link. This field is deprecated – use `workspaceId` instead.""" + track_conversion: Annotated[Optional[bool], pydantic.Field(alias="trackConversion")] = False r"""[BETA] Whether to track conversions for the short link.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: Optional[bool] = False r"""Whether the short link is archived.""" - proxy: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('proxy'), 'exclude': lambda f: f is None }}) + proxy: Optional[bool] = False r"""Whether the short link uses Custom Social Media Cards feature.""" - rewrite: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rewrite'), 'exclude': lambda f: f is None }}) + rewrite: Optional[bool] = False r"""Whether the short link uses link cloaking.""" - do_index: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doIndex'), 'exclude': lambda f: f is None }}) + do_index: Annotated[Optional[bool], pydantic.Field(alias="doIndex")] = False r"""Whether to allow search engines to index the short link.""" - public_stats: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicStats'), 'exclude': lambda f: f is None }}) + public_stats: Annotated[Optional[bool], pydantic.Field(alias="publicStats")] = False r"""Whether the short link's stats are publicly accessible.""" - clicks: Optional[float] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clicks'), 'exclude': 
lambda f: f is None }}) + clicks: Optional[float] = 0 r"""The number of clicks on the short link.""" - leads: Optional[float] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('leads'), 'exclude': lambda f: f is None }}) + leads: Optional[float] = 0 r"""[BETA]: The number of leads the short links has generated.""" - sales: Optional[float] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales'), 'exclude': lambda f: f is None }}) + sales: Optional[float] = 0 r"""[BETA]: The number of sales the short links has generated.""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["trackConversion", "archived", "proxy", "rewrite", "doIndex", "publicStats", "clicks", "leads", "sales"] + nullable_fields = ["externalId", "expiresAt", "expiredUrl", "password", "title", "description", "image", "ios", "android", "geo", "tagId", "tags", "comments", "utm_source", "utm_medium", "utm_campaign", "utm_term", "utm_content", "lastClicked"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/components/salesbrowsers.py b/src/dub/models/components/salesbrowsers.py index 2554874..95d2a7b 100644 --- a/src/dub/models/components/salesbrowsers.py +++ b/src/dub/models/components/salesbrowsers.py @@ -1,19 +1,24 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesBrowsers: - browser: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('browser') }}) +class SalesBrowsersTypedDict(TypedDict): + browser: str r"""The name of the browser""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this browser""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this browser""" +class SalesBrowsers(BaseModel): + browser: str + r"""The name of the browser""" + sales: float + r"""The number of sales from this browser""" + amount: float + r"""The total amount of sales from this browser""" + diff --git a/src/dub/models/components/salescities.py b/src/dub/models/components/salescities.py index ad6d696..25b29ee 100644 --- a/src/dub/models/components/salescities.py +++ b/src/dub/models/components/salescities.py @@ -1,276 +1,283 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class SalesCitiesCountry(str, Enum): r"""The 2-letter country code of the city: https://d.to/geo""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + 
BB = "BB" + BY = "BY" + BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = "CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = "TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = "SS" + XK = "XK" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesCities: - city: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('city') }}) +class SalesCitiesTypedDict(TypedDict): + city: str r"""The name of the city""" - country: SalesCitiesCountry = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country') }}) + country: SalesCitiesCountry r"""The 2-letter country code of the city: https://d.to/geo""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this city""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this city""" +class SalesCities(BaseModel): + city: str + r"""The name of the city""" + country: SalesCitiesCountry + r"""The 2-letter country code of the city: https://d.to/geo""" + sales: float + r"""The number of sales from this city""" + amount: float + r"""The total amount of sales from this city""" + diff --git a/src/dub/models/components/salescount.py b/src/dub/models/components/salescount.py index ea1c8cb..fcc9a4a 100644 --- a/src/dub/models/components/salescount.py +++ b/src/dub/models/components/salescount.py @@ -1,17 +1,20 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesCount: - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) +class SalesCountTypedDict(TypedDict): + sales: float r"""The total number of sales""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales""" +class SalesCount(BaseModel): + sales: float + r"""The total number of sales""" + amount: float + r"""The total amount of sales""" + diff --git a/src/dub/models/components/salescountries.py b/src/dub/models/components/salescountries.py index 41d1a48..fc2ba7b 100644 --- a/src/dub/models/components/salescountries.py +++ b/src/dub/models/components/salescountries.py @@ -1,274 +1,279 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
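The hunks above show the regeneration pattern for every analytics component: a `*TypedDict` describing the wire shape now sits beside a pydantic `BaseModel`, and country codes become `str`-valued enums. A minimal sketch of what that looks like in use, assuming the regenerated `dub` package is importable and that its `BaseModel` exposes the usual pydantic v2 API; the payload values are placeholders.

```python
# Illustrative sketch only: assumes the regenerated `dub` package (pydantic v2)
# is installed; the values below are placeholders.
from dub.models.components.salescities import SalesCities, SalesCitiesCountry
from dub.models.components.salescount import SalesCountTypedDict

# The TypedDict mirrors the raw JSON shape and is useful for type-checking dicts.
totals: SalesCountTypedDict = {"sales": 4, "amount": 96.0}

# The pydantic model validates the same shape into typed attributes.
row = SalesCities.model_validate(
    {"city": "Berlin", "country": "DE", "sales": 4, "amount": 96.0}
)

# The country code is coerced into the str-valued enum, so both checks hold.
assert row.country is SalesCitiesCountry.DE
assert row.country == "DE"
```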
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class SalesCountriesCountry(str, Enum): r"""The 2-letter country code: https://d.to/geo""" - AF = 'AF' - AL = 'AL' - DZ = 'DZ' - AS = 'AS' - AD = 'AD' - AO = 'AO' - AI = 'AI' - AQ = 'AQ' - AG = 'AG' - AR = 'AR' - AM = 'AM' - AW = 'AW' - AU = 'AU' - AT = 'AT' - AZ = 'AZ' - BS = 'BS' - BH = 'BH' - BD = 'BD' - BB = 'BB' - BY = 'BY' - BE = 'BE' - BZ = 'BZ' - BJ = 'BJ' - BM = 'BM' - BT = 'BT' - BO = 'BO' - BA = 'BA' - BW = 'BW' - BV = 'BV' - BR = 'BR' - IO = 'IO' - BN = 'BN' - BG = 'BG' - BF = 'BF' - BI = 'BI' - KH = 'KH' - CM = 'CM' - CA = 'CA' - CV = 'CV' - KY = 'KY' - CF = 'CF' - TD = 'TD' - CL = 'CL' - CN = 'CN' - CX = 'CX' - CC = 'CC' - CO = 'CO' - KM = 'KM' - CG = 'CG' - CD = 'CD' - CK = 'CK' - CR = 'CR' - CI = 'CI' - HR = 'HR' - CU = 'CU' - CY = 'CY' - CZ = 'CZ' - DK = 'DK' - DJ = 'DJ' - DM = 'DM' - DO = 'DO' - EC = 'EC' - EG = 'EG' - SV = 'SV' - GQ = 'GQ' - ER = 'ER' - EE = 'EE' - ET = 'ET' - FK = 'FK' - FO = 'FO' - FJ = 'FJ' - FI = 'FI' - FR = 'FR' - GF = 'GF' - PF = 'PF' - TF = 'TF' - GA = 'GA' - GM = 'GM' - GE = 'GE' - DE = 'DE' - GH = 'GH' - GI = 'GI' - GR = 'GR' - GL = 'GL' - GD = 'GD' - GP = 'GP' - GU = 'GU' - GT = 'GT' - GN = 'GN' - GW = 'GW' - GY = 'GY' - HT = 'HT' - HM = 'HM' - VA = 'VA' - HN = 'HN' - HK = 'HK' - HU = 'HU' - IS = 'IS' - IN = 'IN' - ID = 'ID' - IR = 'IR' - IQ = 'IQ' - IE = 'IE' - IL = 'IL' - IT = 'IT' - JM = 'JM' - JP = 'JP' - JO = 'JO' - KZ = 'KZ' - KE = 'KE' - KI = 'KI' - KP = 'KP' - KR = 'KR' - KW = 'KW' - KG = 'KG' - LA = 'LA' - LV = 'LV' - LB = 'LB' - LS = 'LS' - LR = 'LR' - LY = 'LY' - LI = 'LI' - LT = 'LT' - LU = 'LU' - MO = 'MO' - MG = 'MG' - MW = 'MW' - MY = 'MY' - MV = 'MV' - ML = 'ML' - MT = 'MT' - MH = 'MH' - MQ = 'MQ' - MR = 'MR' - MU = 'MU' - YT = 'YT' - MX = 'MX' - FM = 'FM' - MD = 'MD' - MC = 'MC' - MN = 'MN' - MS = 'MS' - MA = 'MA' - MZ = 'MZ' - MM = 'MM' - NA = 'NA' - NR = 'NR' - NP = 'NP' - NL = 'NL' - NC = 'NC' - NZ = 'NZ' - NI = 'NI' - NE = 'NE' - NG = 'NG' - NU = 'NU' - NF = 'NF' - MK = 'MK' - MP = 'MP' - NO = 'NO' - OM = 'OM' - PK = 'PK' - PW = 'PW' - PS = 'PS' - PA = 'PA' - PG = 'PG' - PY = 'PY' - PE = 'PE' - PH = 'PH' - PN = 'PN' - PL = 'PL' - PT = 'PT' - PR = 'PR' - QA = 'QA' - RE = 'RE' - RO = 'RO' - RU = 'RU' - RW = 'RW' - SH = 'SH' - KN = 'KN' - LC = 'LC' - PM = 'PM' - VC = 'VC' - WS = 'WS' - SM = 'SM' - ST = 'ST' - SA = 'SA' - SN = 'SN' - SC = 'SC' - SL = 'SL' - SG = 'SG' - SK = 'SK' - SI = 'SI' - SB = 'SB' - SO = 'SO' - ZA = 'ZA' - GS = 'GS' - ES = 'ES' - LK = 'LK' - SD = 'SD' - SR = 'SR' - SJ = 'SJ' - SZ = 'SZ' - SE = 'SE' - CH = 'CH' - SY = 'SY' - TW = 'TW' - TJ = 'TJ' - TZ = 'TZ' - TH = 'TH' - TL = 'TL' - TG = 'TG' - TK = 'TK' - TO = 'TO' - TT = 'TT' - TN = 'TN' - TR = 'TR' - TM = 'TM' - TC = 'TC' - TV = 'TV' - UG = 'UG' - UA = 'UA' - AE = 'AE' - GB = 'GB' - US = 'US' - UM = 'UM' - UY = 'UY' - UZ = 'UZ' - VU = 'VU' - VE = 'VE' - VN = 'VN' - VG = 'VG' - VI = 'VI' - WF = 'WF' - EH = 'EH' - YE = 'YE' - ZM = 'ZM' - ZW = 'ZW' - AX = 'AX' - BQ = 'BQ' - CW = 'CW' - GG = 'GG' - IM = 'IM' - JE = 'JE' - ME = 'ME' - BL = 'BL' - MF = 'MF' - RS = 'RS' - SX = 'SX' - SS = 'SS' - XK = 'XK' + AF = "AF" + AL = "AL" + DZ = "DZ" + AS = "AS" + AD = "AD" + AO = "AO" + AI = "AI" + AQ = "AQ" + AG = "AG" + AR = "AR" + AM = "AM" + AW = "AW" + AU = "AU" + AT = "AT" + AZ = "AZ" + BS = "BS" + BH = "BH" + BD = "BD" + BB = "BB" 
+ BY = "BY" + BE = "BE" + BZ = "BZ" + BJ = "BJ" + BM = "BM" + BT = "BT" + BO = "BO" + BA = "BA" + BW = "BW" + BV = "BV" + BR = "BR" + IO = "IO" + BN = "BN" + BG = "BG" + BF = "BF" + BI = "BI" + KH = "KH" + CM = "CM" + CA = "CA" + CV = "CV" + KY = "KY" + CF = "CF" + TD = "TD" + CL = "CL" + CN = "CN" + CX = "CX" + CC = "CC" + CO = "CO" + KM = "KM" + CG = "CG" + CD = "CD" + CK = "CK" + CR = "CR" + CI = "CI" + HR = "HR" + CU = "CU" + CY = "CY" + CZ = "CZ" + DK = "DK" + DJ = "DJ" + DM = "DM" + DO = "DO" + EC = "EC" + EG = "EG" + SV = "SV" + GQ = "GQ" + ER = "ER" + EE = "EE" + ET = "ET" + FK = "FK" + FO = "FO" + FJ = "FJ" + FI = "FI" + FR = "FR" + GF = "GF" + PF = "PF" + TF = "TF" + GA = "GA" + GM = "GM" + GE = "GE" + DE = "DE" + GH = "GH" + GI = "GI" + GR = "GR" + GL = "GL" + GD = "GD" + GP = "GP" + GU = "GU" + GT = "GT" + GN = "GN" + GW = "GW" + GY = "GY" + HT = "HT" + HM = "HM" + VA = "VA" + HN = "HN" + HK = "HK" + HU = "HU" + IS = "IS" + IN = "IN" + ID = "ID" + IR = "IR" + IQ = "IQ" + IE = "IE" + IL = "IL" + IT = "IT" + JM = "JM" + JP = "JP" + JO = "JO" + KZ = "KZ" + KE = "KE" + KI = "KI" + KP = "KP" + KR = "KR" + KW = "KW" + KG = "KG" + LA = "LA" + LV = "LV" + LB = "LB" + LS = "LS" + LR = "LR" + LY = "LY" + LI = "LI" + LT = "LT" + LU = "LU" + MO = "MO" + MG = "MG" + MW = "MW" + MY = "MY" + MV = "MV" + ML = "ML" + MT = "MT" + MH = "MH" + MQ = "MQ" + MR = "MR" + MU = "MU" + YT = "YT" + MX = "MX" + FM = "FM" + MD = "MD" + MC = "MC" + MN = "MN" + MS = "MS" + MA = "MA" + MZ = "MZ" + MM = "MM" + NA = "NA" + NR = "NR" + NP = "NP" + NL = "NL" + NC = "NC" + NZ = "NZ" + NI = "NI" + NE = "NE" + NG = "NG" + NU = "NU" + NF = "NF" + MK = "MK" + MP = "MP" + NO = "NO" + OM = "OM" + PK = "PK" + PW = "PW" + PS = "PS" + PA = "PA" + PG = "PG" + PY = "PY" + PE = "PE" + PH = "PH" + PN = "PN" + PL = "PL" + PT = "PT" + PR = "PR" + QA = "QA" + RE = "RE" + RO = "RO" + RU = "RU" + RW = "RW" + SH = "SH" + KN = "KN" + LC = "LC" + PM = "PM" + VC = "VC" + WS = "WS" + SM = "SM" + ST = "ST" + SA = "SA" + SN = "SN" + SC = "SC" + SL = "SL" + SG = "SG" + SK = "SK" + SI = "SI" + SB = "SB" + SO = "SO" + ZA = "ZA" + GS = "GS" + ES = "ES" + LK = "LK" + SD = "SD" + SR = "SR" + SJ = "SJ" + SZ = "SZ" + SE = "SE" + CH = "CH" + SY = "SY" + TW = "TW" + TJ = "TJ" + TZ = "TZ" + TH = "TH" + TL = "TL" + TG = "TG" + TK = "TK" + TO = "TO" + TT = "TT" + TN = "TN" + TR = "TR" + TM = "TM" + TC = "TC" + TV = "TV" + UG = "UG" + UA = "UA" + AE = "AE" + GB = "GB" + US = "US" + UM = "UM" + UY = "UY" + UZ = "UZ" + VU = "VU" + VE = "VE" + VN = "VN" + VG = "VG" + VI = "VI" + WF = "WF" + EH = "EH" + YE = "YE" + ZM = "ZM" + ZW = "ZW" + AX = "AX" + BQ = "BQ" + CW = "CW" + GG = "GG" + IM = "IM" + JE = "JE" + ME = "ME" + BL = "BL" + MF = "MF" + RS = "RS" + SX = "SX" + SS = "SS" + XK = "XK" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesCountries: - country: SalesCountriesCountry = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country') }}) +class SalesCountriesTypedDict(TypedDict): + country: SalesCountriesCountry r"""The 2-letter country code: https://d.to/geo""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this country""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this country""" +class SalesCountries(BaseModel): + country: 
SalesCountriesCountry + r"""The 2-letter country code: https://d.to/geo""" + sales: float + r"""The number of sales from this country""" + amount: float + r"""The total amount of sales from this country""" + diff --git a/src/dub/models/components/salesdevices.py b/src/dub/models/components/salesdevices.py index 1636d98..60ea7b9 100644 --- a/src/dub/models/components/salesdevices.py +++ b/src/dub/models/components/salesdevices.py @@ -1,19 +1,24 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesDevices: - device: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('device') }}) +class SalesDevicesTypedDict(TypedDict): + device: str r"""The name of the device""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this device""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this device""" +class SalesDevices(BaseModel): + device: str + r"""The name of the device""" + sales: float + r"""The number of sales from this device""" + amount: float + r"""The total amount of sales from this device""" + diff --git a/src/dub/models/components/salesos.py b/src/dub/models/components/salesos.py index f81b335..adeb128 100644 --- a/src/dub/models/components/salesos.py +++ b/src/dub/models/components/salesos.py @@ -1,19 +1,24 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesOS: - os: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('os') }}) +class SalesOSTypedDict(TypedDict): + os: str r"""The name of the OS""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this OS""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this OS""" +class SalesOS(BaseModel): + os: str + r"""The name of the OS""" + sales: float + r"""The number of sales from this OS""" + amount: float + r"""The total amount of sales from this OS""" + diff --git a/src/dub/models/components/salesreferers.py b/src/dub/models/components/salesreferers.py index fca54ec..fa408e8 100644 --- a/src/dub/models/components/salesreferers.py +++ b/src/dub/models/components/salesreferers.py @@ -1,19 +1,24 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesReferers: - referer: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('referer') }}) +class SalesReferersTypedDict(TypedDict): + referer: str r"""The name of the referer. If unknown, this will be `(direct)`""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this referer""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this referer""" +class SalesReferers(BaseModel): + referer: str + r"""The name of the referer. If unknown, this will be `(direct)`""" + sales: float + r"""The number of sales from this referer""" + amount: float + r"""The total amount of sales from this referer""" + diff --git a/src/dub/models/components/salestimeseries.py b/src/dub/models/components/salestimeseries.py index 010aa9e..2b39bae 100644 --- a/src/dub/models/components/salestimeseries.py +++ b/src/dub/models/components/salestimeseries.py @@ -1,19 +1,24 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesTimeseries: - start: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start') }}) +class SalesTimeseriesTypedDict(TypedDict): + start: str r"""The starting timestamp of the interval""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales in the interval""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales in the interval""" +class SalesTimeseries(BaseModel): + start: str + r"""The starting timestamp of the interval""" + sales: float + r"""The number of sales in the interval""" + amount: float + r"""The total amount of sales in the interval""" + diff --git a/src/dub/models/components/salestoplinks.py b/src/dub/models/components/salestoplinks.py index 0ebbda1..d66b574 100644 --- a/src/dub/models/components/salestoplinks.py +++ b/src/dub/models/components/salestoplinks.py @@ -1,34 +1,50 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import pydantic +from typing import TypedDict +from typing_extensions import Annotated -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesTopLinks: - link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('link') }}) - r"""The unique ID of the short link - - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. 
- """ - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class SalesTopLinksTypedDict(TypedDict): + link: str + r"""The unique ID of the short link""" + id: str r"""The unique ID of the short link""" - domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) + domain: str r"""The domain of the short link""" - key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }}) + key: str r"""The key of the short link""" - short_link: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shortLink') }}) + short_link: str r"""The short link URL""" - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) + url: str r"""The destination URL of the short link""" - created_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('createdAt') }}) + created_at: str r"""The creation timestamp of the short link""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this link""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this link""" +class SalesTopLinks(BaseModel): + link: Annotated[str, pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.")] + r"""The unique ID of the short link""" + id: str + r"""The unique ID of the short link""" + domain: str + r"""The domain of the short link""" + key: str + r"""The key of the short link""" + short_link: Annotated[str, pydantic.Field(alias="shortLink")] + r"""The short link URL""" + url: str + r"""The destination URL of the short link""" + created_at: Annotated[str, pydantic.Field(alias="createdAt")] + r"""The creation timestamp of the short link""" + sales: float + r"""The number of sales from this link""" + amount: float + r"""The total amount of sales from this link""" + diff --git a/src/dub/models/components/salestopurls.py b/src/dub/models/components/salestopurls.py index 2f32ec0..d839742 100644 --- a/src/dub/models/components/salestopurls.py +++ b/src/dub/models/components/salestopurls.py @@ -1,19 +1,24 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from typing import TypedDict -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SalesTopUrls: - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class SalesTopUrlsTypedDict(TypedDict): + url: str r"""The destination URL""" - sales: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sales') }}) + sales: float r"""The number of sales from this URL""" - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: float r"""The total amount of sales from this URL""" +class SalesTopUrls(BaseModel): + url: str + r"""The destination URL""" + sales: float + r"""The number of sales from this URL""" + amount: float + r"""The total amount of sales from this URL""" + diff --git a/src/dub/models/components/security.py b/src/dub/models/components/security.py index 2372795..c953b35 100644 --- a/src/dub/models/components/security.py +++ b/src/dub/models/components/security.py @@ -1,12 +1,16 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, SecurityMetadata +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class Security: - token: Optional[str] = dataclasses.field(default=None, metadata={'security': { 'scheme': True, 'type': 'http', 'sub_type': 'bearer', 'field_name': 'Authorization' }}) +class SecurityTypedDict(TypedDict): + token: NotRequired[str] +class Security(BaseModel): + token: Annotated[Optional[str], FieldMetadata(security=SecurityMetadata(scheme=True, scheme_type="http", sub_type="bearer", field_name="Authorization"))] = None + diff --git a/src/dub/models/components/tagschema.py b/src/dub/models/components/tagschema.py index 0a95351..a55ffc0 100644 --- a/src/dub/models/components/tagschema.py +++ b/src/dub/models/components/tagschema.py @@ -1,31 +1,36 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel from enum import Enum +from typing import TypedDict class Color(str, Enum): r"""The color of the tag.""" - RED = 'red' - YELLOW = 'yellow' - GREEN = 'green' - BLUE = 'blue' - PURPLE = 'purple' - PINK = 'pink' - BROWN = 'brown' + RED = "red" + YELLOW = "yellow" + GREEN = "green" + BLUE = "blue" + PURPLE = "purple" + PINK = "pink" + BROWN = "brown" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TagSchema: - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class TagSchemaTypedDict(TypedDict): + id: str r"""The unique ID of the tag.""" - name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + name: str r"""The name of the tag.""" - color: Color = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color') }}) + color: Color r"""The color of the tag.""" +class TagSchema(BaseModel): + id: str + r"""The unique ID of the tag.""" + name: str + r"""The name of the tag.""" + color: Color + r"""The color of the tag.""" + diff --git a/src/dub/models/components/workspaceschema.py b/src/dub/models/components/workspaceschema.py index 7e9b5b8..a83e536 100644 --- a/src/dub/models/components/workspaceschema.py +++ b/src/dub/models/components/workspaceschema.py @@ -1,93 +1,162 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel, Nullable from enum import Enum -from typing import List, Optional +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict +from typing_extensions import Annotated, NotRequired class Plan(str, Enum): r"""The plan of the workspace.""" - FREE = 'free' - PRO = 'pro' - BUSINESS = 'business' - BUSINESS_PLUS = 'business plus' - BUSINESS_EXTRA = 'business extra' - BUSINESS_MAX = 'business max' - ENTERPRISE = 'enterprise' + FREE = "free" + PRO = "pro" + BUSINESS = "business" + BUSINESS_PLUS = "business plus" + BUSINESS_EXTRA = "business extra" + BUSINESS_MAX = "business max" + ENTERPRISE = "enterprise" class Role(str, Enum): r"""The role of the authenticated user in the workspace.""" - OWNER = 'owner' - MEMBER = 'member' + OWNER = "owner" + MEMBER = "member" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Users: - role: Role = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role') }}) +class UsersTypedDict(TypedDict): + role: Role r"""The role of the authenticated user in the workspace.""" +class Users(BaseModel): + role: Role + r"""The role of the authenticated user in the workspace.""" + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Domains: - slug: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slug') }}) +class DomainsTypedDict(TypedDict): + slug: str r"""The domain name.""" - primary: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary'), 'exclude': lambda f: f is None }}) + primary: NotRequired[bool] r"""Whether the domain is the primary domain for the workspace.""" +class 
Domains(BaseModel): + slug: str + r"""The domain name.""" + primary: Optional[bool] = False + r"""Whether the domain is the primary domain for the workspace.""" + +class WorkspaceSchemaTypedDict(TypedDict): + id: str + r"""The unique ID of the workspace.""" + name: str + r"""The name of the workspace.""" + slug: str + r"""The slug of the workspace.""" + usage: float + r"""The usage of the workspace.""" + usage_limit: float + r"""The usage limit of the workspace.""" + links_usage: float + r"""The links usage of the workspace.""" + links_limit: float + r"""The links limit of the workspace.""" + domains_limit: float + r"""The domains limit of the workspace.""" + tags_limit: float + r"""The tags limit of the workspace.""" + users_limit: float + r"""The users limit of the workspace.""" + plan: Plan + r"""The plan of the workspace.""" + stripe_id: Nullable[str] + r"""The Stripe ID of the workspace.""" + billing_cycle_start: float + r"""The date and time when the billing cycle starts for the workspace.""" + stripe_connect_id: Nullable[str] + r"""[BETA]: The Stripe Connect ID of the workspace.""" + created_at: str + r"""The date and time when the workspace was created.""" + users: List[UsersTypedDict] + r"""The role of the authenticated user in the workspace.""" + domains: List[DomainsTypedDict] + r"""The domains of the workspace.""" + invite_code: Nullable[str] + r"""The invite code of the workspace.""" + logo: NotRequired[Nullable[str]] + r"""The logo of the workspace.""" + beta_tester: NotRequired[bool] + r"""Whether the workspace is enrolled in the beta testing program.""" + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class WorkspaceSchema: - UNSET='__SPEAKEASY_UNSET__' - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) +class WorkspaceSchema(BaseModel): + id: str r"""The unique ID of the workspace.""" - name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + name: str r"""The name of the workspace.""" - slug: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slug') }}) + slug: str r"""The slug of the workspace.""" - usage: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('usage') }}) + usage: float r"""The usage of the workspace.""" - usage_limit: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('usageLimit') }}) + usage_limit: Annotated[float, pydantic.Field(alias="usageLimit")] r"""The usage limit of the workspace.""" - links_usage: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('linksUsage') }}) + links_usage: Annotated[float, pydantic.Field(alias="linksUsage")] r"""The links usage of the workspace.""" - links_limit: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('linksLimit') }}) + links_limit: Annotated[float, pydantic.Field(alias="linksLimit")] r"""The links limit of the workspace.""" - domains_limit: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domainsLimit') }}) + domains_limit: Annotated[float, pydantic.Field(alias="domainsLimit")] r"""The domains limit of the workspace.""" - tags_limit: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagsLimit') }}) + tags_limit: Annotated[float, pydantic.Field(alias="tagsLimit")] r"""The tags 
limit of the workspace.""" - users_limit: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('usersLimit') }}) + users_limit: Annotated[float, pydantic.Field(alias="usersLimit")] r"""The users limit of the workspace.""" - plan: Plan = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('plan') }}) + plan: Plan r"""The plan of the workspace.""" - stripe_id: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stripeId') }}) + stripe_id: Annotated[Nullable[str], pydantic.Field(alias="stripeId")] r"""The Stripe ID of the workspace.""" - billing_cycle_start: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('billingCycleStart') }}) + billing_cycle_start: Annotated[float, pydantic.Field(alias="billingCycleStart")] r"""The date and time when the billing cycle starts for the workspace.""" - stripe_connect_id: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stripeConnectId') }}) + stripe_connect_id: Annotated[Nullable[str], pydantic.Field(alias="stripeConnectId")] r"""[BETA]: The Stripe Connect ID of the workspace.""" - created_at: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('createdAt') }}) + created_at: Annotated[str, pydantic.Field(alias="createdAt")] r"""The date and time when the workspace was created.""" - users: List[Users] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('users') }}) + users: List[Users] r"""The role of the authenticated user in the workspace.""" - domains: List[Domains] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domains') }}) + domains: List[Domains] r"""The domains of the workspace.""" - invite_code: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('inviteCode') }}) + invite_code: Annotated[Nullable[str], pydantic.Field(alias="inviteCode")] r"""The invite code of the workspace.""" - logo: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('logo'), 'exclude': lambda f: f is WorkspaceSchema.UNSET }}) + logo: Optional[Nullable[str]] = None r"""The logo of the workspace.""" - beta_tester: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('betaTester'), 'exclude': lambda f: f is None }}) + beta_tester: Annotated[Optional[bool], pydantic.Field(alias="betaTester")] = None r"""Whether the workspace is enrolled in the beta testing program.""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["logo", "betaTester"] + nullable_fields = ["stripeId", "stripeConnectId", "inviteCode", "logo"] + null_default_fields = ["logo"] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/errors/badrequest.py b/src/dub/models/errors/badrequest.py index 3d4c12e..cf3b588 100644 --- a/src/dub/models/errors/badrequest.py +++ b/src/dub/models/errors/badrequest.py 
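The `serialize_model` hooks added to `LinkSchema` and `WorkspaceSchema` above implement one rule: non-null values always serialize; a `None` is kept only when the field is required, or when it is an optional *nullable* field that was explicitly set (or listed in `null_default_fields`, as `logo` is for `WorkspaceSchema`); an optional field that was never set is dropped. A self-contained toy model, not one of the generated classes, that reproduces this rule under pydantic v2 for illustration only:

```python
# Toy stand-in for the generated serialize_model pattern; assumes pydantic v2.
from typing import Optional
from pydantic import BaseModel, model_serializer


class ToySchema(BaseModel):
    name: str                     # required: always kept in the output
    logo: Optional[str] = None    # optional *and* nullable: kept only when explicitly set
    beta: Optional[bool] = None   # optional, not nullable: dropped while unset/None

    @model_serializer(mode="wrap")
    def serialize_model(self, handler):
        optional_fields = ["logo", "beta"]
        nullable_fields = ["logo"]
        serialized = handler(self)  # default serialization
        out = {}
        for field_name in self.model_fields:
            value = serialized.get(field_name)
            if value is not None:
                out[field_name] = value
            elif field_name not in optional_fields or (
                field_name in nullable_fields
                and field_name in self.__pydantic_fields_set__
            ):
                out[field_name] = value
        return out


print(ToySchema(name="acme").model_dump())             # {'name': 'acme'}
print(ToySchema(name="acme", logo=None).model_dump())  # {'name': 'acme', 'logo': None}
```

Read against the generated code, this is why an unset `betaTester` disappears from a `WorkspaceSchema` dump while `logo`, being in `null_default_fields`, can still appear as `null`.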
@@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class Code(str, Enum): r"""A short code indicating the error code returned.""" - BAD_REQUEST = 'bad_request' + BAD_REQUEST = "bad_request" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Error: - code: Code = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class ErrorTypedDict(TypedDict): + code: Code r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class Error(BaseModel): + code: Code + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class BadRequestData(BaseModel): + error: Error + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class BadRequest(Exception): r"""The server cannot or will not process the request due to something that is perceived to be a client error (e.g., malformed request syntax, invalid request message framing, or deceptive request routing).""" - error: Error = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: BadRequestData + + def __init__(self, data: BadRequestData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, BadRequestData) + diff --git a/src/dub/models/errors/conflict.py b/src/dub/models/errors/conflict.py index 3b54dc4..f066ff7 100644 --- a/src/dub/models/errors/conflict.py +++ b/src/dub/models/errors/conflict.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class ConflictCode(str, Enum): r"""A short code indicating the error code returned.""" - CONFLICT = 'conflict' + CONFLICT = "conflict" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ConflictError: - code: ConflictCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class ConflictErrorTypedDict(TypedDict): + code: ConflictCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class ConflictError(BaseModel): + code: ConflictCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class ConflictData(BaseModel): + error: ConflictError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class Conflict(Exception): r"""This response is sent when a request conflicts with the current state of the server.""" - error: ConflictError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: ConflictData + + def __init__(self, data: ConflictData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, ConflictData) + diff --git a/src/dub/models/errors/forbidden.py b/src/dub/models/errors/forbidden.py index beeeaba..b9a5326 100644 --- a/src/dub/models/errors/forbidden.py +++ b/src/dub/models/errors/forbidden.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class ForbiddenCode(str, Enum): r"""A short code indicating the error code returned.""" - FORBIDDEN = 'forbidden' + FORBIDDEN = "forbidden" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ForbiddenError: - code: ForbiddenCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class ForbiddenErrorTypedDict(TypedDict): + code: ForbiddenCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class ForbiddenError(BaseModel): + code: ForbiddenCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class ForbiddenData(BaseModel): + error: ForbiddenError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class Forbidden(Exception): r"""The client does not have access rights to the content; that is, it is unauthorized, so the server is refusing to give the requested resource. Unlike 401 Unauthorized, the client's identity is known to the server.""" - error: ForbiddenError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: ForbiddenData + + def __init__(self, data: ForbiddenData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, ForbiddenData) + diff --git a/src/dub/models/errors/internalservererror.py b/src/dub/models/errors/internalservererror.py index 38a8175..05bea03 100644 --- a/src/dub/models/errors/internalservererror.py +++ b/src/dub/models/errors/internalservererror.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class InternalServerErrorCode(str, Enum): r"""A short code indicating the error code returned.""" - INTERNAL_SERVER_ERROR = 'internal_server_error' + INTERNAL_SERVER_ERROR = "internal_server_error" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class InternalServerErrorError: - code: InternalServerErrorCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class InternalServerErrorErrorTypedDict(TypedDict): + code: InternalServerErrorCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class InternalServerErrorError(BaseModel): + code: InternalServerErrorCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class InternalServerErrorData(BaseModel): + error: InternalServerErrorError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class InternalServerError(Exception): r"""The server has encountered a situation it does not know how to handle.""" - error: InternalServerErrorError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: InternalServerErrorData + + def __init__(self, data: InternalServerErrorData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, InternalServerErrorData) + diff --git a/src/dub/models/errors/inviteexpired.py b/src/dub/models/errors/inviteexpired.py index 81a4a7d..965fc08 100644 --- a/src/dub/models/errors/inviteexpired.py +++ b/src/dub/models/errors/inviteexpired.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class InviteExpiredCode(str, Enum): r"""A short code indicating the error code returned.""" - INVITE_EXPIRED = 'invite_expired' + INVITE_EXPIRED = "invite_expired" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class InviteExpiredError: - code: InviteExpiredCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class InviteExpiredErrorTypedDict(TypedDict): + code: InviteExpiredCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class InviteExpiredError(BaseModel): + code: InviteExpiredCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class InviteExpiredData(BaseModel): + error: InviteExpiredError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class InviteExpired(Exception): r"""This response is sent when the requested content has been permanently deleted from server, with no forwarding address.""" - error: InviteExpiredError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: InviteExpiredData + + def __init__(self, data: InviteExpiredData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, InviteExpiredData) + diff --git a/src/dub/models/errors/notfound.py b/src/dub/models/errors/notfound.py index 5751226..77efdb0 100644 --- a/src/dub/models/errors/notfound.py +++ b/src/dub/models/errors/notfound.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class NotFoundCode(str, Enum): r"""A short code indicating the error code returned.""" - NOT_FOUND = 'not_found' + NOT_FOUND = "not_found" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class NotFoundError: - code: NotFoundCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class NotFoundErrorTypedDict(TypedDict): + code: NotFoundCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class NotFoundError(BaseModel): + code: NotFoundCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class NotFoundData(BaseModel): + error: NotFoundError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class NotFound(Exception): r"""The server cannot find the requested resource.""" - error: NotFoundError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: NotFoundData + + def __init__(self, data: NotFoundData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, NotFoundData) + diff --git a/src/dub/models/errors/ratelimitexceeded.py b/src/dub/models/errors/ratelimitexceeded.py index 8ed47c8..91eb875 100644 --- a/src/dub/models/errors/ratelimitexceeded.py +++ b/src/dub/models/errors/ratelimitexceeded.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class RateLimitExceededCode(str, Enum): r"""A short code indicating the error code returned.""" - RATE_LIMIT_EXCEEDED = 'rate_limit_exceeded' + RATE_LIMIT_EXCEEDED = "rate_limit_exceeded" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class RateLimitExceededError: - code: RateLimitExceededCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class RateLimitExceededErrorTypedDict(TypedDict): + code: RateLimitExceededCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class RateLimitExceededError(BaseModel): + code: RateLimitExceededCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class RateLimitExceededData(BaseModel): + error: RateLimitExceededError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class RateLimitExceeded(Exception): - r"""The user has sent too many requests in a given amount of time (\\"rate limiting\\")""" - error: RateLimitExceededError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + r"""The user has sent too many requests in a given amount of time (\"rate limiting\")""" + data: RateLimitExceededData + + def __init__(self, data: RateLimitExceededData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, RateLimitExceededData) + diff --git a/src/dub/models/errors/sdkerror.py b/src/dub/models/errors/sdkerror.py index 6bb02bb..cc9bf0f 100644 --- a/src/dub/models/errors/sdkerror.py +++ b/src/dub/models/errors/sdkerror.py @@ -1,24 +1,22 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" -import requests as requests_http +from dataclasses import dataclass +from typing import Optional +import httpx +@dataclass class SDKError(Exception): """Represents an error returned by the API.""" - message: str - status_code: int - body: str - raw_response: requests_http.Response - def __init__(self, message: str, status_code: int, body: str, raw_response: requests_http.Response): - self.message = message - self.status_code = status_code - self.body = body - self.raw_response = raw_response + message: str + status_code: int = -1 + body: str = "" + raw_response: Optional[httpx.Response] = None def __str__(self): - body = '' + body = "" if len(self.body) > 0: - body = f'\n{self.body}' + body = f"\n{self.body}" - return f'{self.message}: Status {self.status_code}{body}' + return f"{self.message}: Status {self.status_code}{body}" diff --git a/src/dub/models/errors/unauthorized.py b/src/dub/models/errors/unauthorized.py index 8c0804e..8a31fe2 100644 --- a/src/dub/models/errors/unauthorized.py +++ b/src/dub/models/errors/unauthorized.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class UnauthorizedCode(str, Enum): r"""A short code indicating the error code returned.""" - UNAUTHORIZED = 'unauthorized' + UNAUTHORIZED = "unauthorized" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UnauthorizedError: - code: UnauthorizedCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class UnauthorizedErrorTypedDict(TypedDict): + code: UnauthorizedCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class UnauthorizedError(BaseModel): + code: UnauthorizedCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class UnauthorizedData(BaseModel): + error: UnauthorizedError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class Unauthorized(Exception): - r"""Although the HTTP standard specifies \\"unauthorized\\", semantically this response means \\"unauthenticated\\". That is, the client must authenticate itself to get the requested response.""" - error: UnauthorizedError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + r"""Although the HTTP standard specifies \"unauthorized\", semantically this response means \"unauthenticated\". 
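SDKError above becomes a plain dataclass built on httpx, with defaults for everything except the message, so it can be constructed and rendered without a live response:

# Sketch based on the fields and __str__ shown above; message and body values are placeholders.
from dub.models.errors.sdkerror import SDKError

err = SDKError("API error occurred", status_code=429, body='{"error": "rate_limit_exceeded"}')
print(str(err))
# API error occurred: Status 429
# {"error": "rate_limit_exceeded"}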
That is, the client must authenticate itself to get the requested response.""" + data: UnauthorizedData + + def __init__(self, data: UnauthorizedData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, UnauthorizedData) + diff --git a/src/dub/models/errors/unprocessableentity.py b/src/dub/models/errors/unprocessableentity.py index e687c91..c8cecdb 100644 --- a/src/dub/models/errors/unprocessableentity.py +++ b/src/dub/models/errors/unprocessableentity.py @@ -1,38 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +import dub.utils as utils from enum import Enum -from typing import Optional +from typing import Optional, TypedDict +from typing_extensions import NotRequired class UnprocessableEntityCode(str, Enum): r"""A short code indicating the error code returned.""" - UNPROCESSABLE_ENTITY = 'unprocessable_entity' + UNPROCESSABLE_ENTITY = "unprocessable_entity" -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UnprocessableEntityError: - code: UnprocessableEntityCode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('code') }}) +class UnprocessableEntityErrorTypedDict(TypedDict): + code: UnprocessableEntityCode r"""A short code indicating the error code returned.""" - message: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message') }}) + message: str r"""A human readable explanation of what went wrong.""" - doc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doc_url'), 'exclude': lambda f: f is None }}) + doc_url: NotRequired[str] r"""A link to our documentation with more details about this error code""" +class UnprocessableEntityError(BaseModel): + code: UnprocessableEntityCode + r"""A short code indicating the error code returned.""" + message: str + r"""A human readable explanation of what went wrong.""" + doc_url: Optional[str] = None + r"""A link to our documentation with more details about this error code""" + +class UnprocessableEntityData(BaseModel): + error: UnprocessableEntityError + -@dataclass_json(undefined=Undefined.EXCLUDE) - -@dataclasses.dataclass class UnprocessableEntity(Exception): r"""The request was well-formed but was unable to be followed due to semantic errors.""" - error: UnprocessableEntityError = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('error') }}) - + data: UnprocessableEntityData + + def __init__(self, data: UnprocessableEntityData): + self.data = data def __str__(self) -> str: - return utils.marshal_json(self, type(self)) + return utils.marshal_json(self.data, UnprocessableEntityData) + diff --git a/src/dub/models/internal/globals.py b/src/dub/models/internal/globals.py index 13dee5b..eed6dff 100644 --- a/src/dub/models/internal/globals.py +++ b/src/dub/models/internal/globals.py @@ -1,15 +1,19 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class Globals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" - project_slug: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'projectSlug', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class GlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] + project_slug: NotRequired[str] +class Globals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + project_slug: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="projectSlug"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + diff --git a/src/dub/models/operations/__init__.py b/src/dub/models/operations/__init__.py index 31495a0..268d25f 100644 --- a/src/dub/models/operations/__init__.py +++ b/src/dub/models/operations/__init__.py @@ -25,4 +25,4 @@ from .updateworkspace import * from .upsertlink import * -__all__ = ["BulkCreateLinksGlobals","BulkCreateLinksTagIds","BulkCreateLinksTagNames","BulkUpdateLinksRequestBody","BulkUpdateLinksTagIds","BulkUpdateLinksTagNames","Color","CreateDomainGlobals","CreateDomainRequestBody","CreateLinkGlobals","CreateLinkRequestBody","CreateTagGlobals","CreateTagRequestBody","Data","DeleteDomainGlobals","DeleteDomainRequest","DeleteDomainResponseBody","DeleteLinkGlobals","DeleteLinkRequest","DeleteLinkResponseBody","Event","GetLinkInfoGlobals","GetLinkInfoRequest","GetLinksCountGlobals","GetLinksCountQueryParamTagIds","GetLinksCountQueryParamTagNames","GetLinksCountRequest","GetLinksGlobals","GetLinksRequest","GetMetatagsRequest","GetMetatagsResponseBody","GetQRCodeRequest","GetTagsGlobals","GetTagsRequest","GetWorkspaceRequest","GroupBy","Interval","Level","ListDomainsGlobals","ListDomainsRequest","One","PaymentProcessor","QueryParamGroupBy","QueryParamTagIds","QueryParamTagNames","RequestBody","RetrieveAnalyticsGlobals","RetrieveAnalyticsRequest","RetrieveAnalyticsResponseBody","Sort","TagIds","TagNames","TrackCustomerGlobals","TrackCustomerRequestBody","TrackCustomerResponseBody","TrackLeadGlobals","TrackLeadRequestBody","TrackLeadResponseBody","TrackSaleGlobals","TrackSaleRequestBody","TrackSaleResponseBody","Two","UpdateDomainGlobals","UpdateDomainRequest","UpdateDomainRequestBody","UpdateLinkGlobals","UpdateLinkRequest","UpdateLinkRequestBody","UpdateLinkTagIds","UpdateLinkTagNames","UpdateTagColor","UpdateTagGlobals","UpdateTagRequest","UpdateTagRequestBody","UpdateWorkspaceRequest","UpsertLinkGlobals","UpsertLinkRequestBody","UpsertLinkTagIds","UpsertLinkTagNames"] +__all__ = 
["BulkCreateLinksGlobals","BulkCreateLinksTagIds","BulkCreateLinksTagNames","BulkUpdateLinksRequestBody","BulkUpdateLinksTagIds","BulkUpdateLinksTagNames","Color","CreateDomainGlobals","CreateDomainRequestBody","CreateLinkGlobals","CreateLinkRequestBody","CreateTagGlobals","CreateTagRequestBody","Data","DeleteDomainGlobals","DeleteDomainRequest","DeleteDomainResponseBody","DeleteLinkGlobals","DeleteLinkRequest","DeleteLinkResponseBody","Event","GetLinkInfoGlobals","GetLinkInfoRequest","GetLinksCountGlobals","GetLinksCountQueryParamTagIds","GetLinksCountQueryParamTagNames","GetLinksCountRequest","GetLinksGlobals","GetLinksRequest","GetMetatagsRequest","GetMetatagsResponseBody","GetQRCodeRequest","GetTagsGlobals","GetTagsRequest","GetWorkspaceRequest","GroupBy","Interval","Level","ListDomainsGlobals","ListDomainsRequest","One","PaymentProcessor","QueryParamGroupBy","QueryParamTagIds","QueryParamTagNames","RequestBody","RetrieveAnalyticsGlobals","RetrieveAnalyticsRequest","RetrieveAnalyticsResponseBody","Sort","TagIds","TagNames","TrackCustomerGlobals","TrackCustomerRequestBody","TrackCustomerResponseBody","TrackLeadGlobals","TrackLeadRequestBody","TrackLeadResponseBody","TrackSaleGlobals","TrackSaleRequestBody","TrackSaleResponseBody","Two","UpdateDomainGlobals","UpdateDomainRequest","UpdateDomainRequestBody","UpdateLinkGlobals","UpdateLinkRequest","UpdateLinkRequestBody","UpdateLinkTagIds","UpdateLinkTagNames","UpdateTagColor","UpdateTagGlobals","UpdateTagRequest","UpdateTagRequestBody","UpdateWorkspaceRequest","UpdateWorkspaceRequestBody","UpsertLinkGlobals","UpsertLinkRequestBody","UpsertLinkTagIds","UpsertLinkTagNames"] diff --git a/src/dub/models/operations/bulkcreatelinks.py b/src/dub/models/operations/bulkcreatelinks.py index 91ad9eb..f7f0ffd 100644 --- a/src/dub/models/operations/bulkcreatelinks.py +++ b/src/dub/models/operations/bulkcreatelinks.py @@ -1,79 +1,162 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from ...models.components import linkgeotargeting as components_linkgeotargeting -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import List, Optional, Union +from dub.models.components import linkgeotargeting as components_linkgeotargeting +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class BulkCreateLinksGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class BulkCreateLinksGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class BulkCreateLinksGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class RequestBody: - UNSET='__SPEAKEASY_UNSET__' - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class RequestBodyTypedDict(TypedDict): + url: str r"""The destination URL of the short link.""" - domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + domain: NotRequired[str] r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" - key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key'), 'exclude': lambda f: f is None }}) + key: NotRequired[str] r"""The short link slug. If not provided, a random 7-character slug will be generated.""" - external_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('externalId'), 'exclude': lambda f: f is RequestBody.UNSET }}) + external_id: NotRequired[Nullable[str]] r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" - prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) + prefix: NotRequired[str] r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). 
Will be ignored if `key` is provided.""" - track_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trackConversion'), 'exclude': lambda f: f is None }}) + track_conversion: NotRequired[bool] r"""Whether to track conversions for the short link.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: NotRequired[bool] r"""Whether the short link is archived.""" - public_stats: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicStats'), 'exclude': lambda f: f is None }}) + public_stats: NotRequired[bool] r"""Whether the short link's stats are publicly accessible.""" - tag_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId'), 'exclude': lambda f: f is RequestBody.UNSET }}) - r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. + tag_id: NotRequired[Nullable[str]] + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: NotRequired[BulkCreateLinksTagIdsTypedDict] + r"""The unique IDs of the tags assigned to the short link.""" + tag_names: NotRequired[BulkCreateLinksTagNamesTypedDict] + r"""The unique name of the tags assigned to the short link (case insensitive).""" + comments: NotRequired[Nullable[str]] + r"""The comments for the short link.""" + expires_at: NotRequired[Nullable[str]] + r"""The date and time when the short link will expire at.""" + expired_url: NotRequired[Nullable[str]] + r"""The URL to redirect to when the short link has expired.""" + password: NotRequired[Nullable[str]] + r"""The password required to access the destination URL of the short link.""" + proxy: NotRequired[bool] + r"""Whether the short link uses Custom Social Media Cards feature.""" + title: NotRequired[Nullable[str]] + r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + description: NotRequired[Nullable[str]] + r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + image: NotRequired[Nullable[str]] + r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + rewrite: NotRequired[bool] + r"""Whether the short link uses link cloaking.""" + ios: NotRequired[Nullable[str]] + r"""The iOS destination URL for the short link for iOS device targeting.""" + android: NotRequired[Nullable[str]] + r"""The Android destination URL for the short link for Android device targeting.""" + geo: NotRequired[Nullable[components_linkgeotargeting.LinkGeoTargetingTypedDict]] + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" + do_index: NotRequired[bool] + r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. 
- """ - tag_ids: Optional[BulkCreateLinksTagIds] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagIds'), 'exclude': lambda f: f is None }}) +class RequestBody(BaseModel): + url: str + r"""The destination URL of the short link.""" + domain: Optional[str] = None + r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" + key: Optional[str] = None + r"""The short link slug. If not provided, a random 7-character slug will be generated.""" + external_id: Annotated[Optional[Nullable[str]], pydantic.Field(alias="externalId")] = None + r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" + prefix: Optional[str] = None + r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided.""" + track_conversion: Annotated[Optional[bool], pydantic.Field(alias="trackConversion")] = False + r"""Whether to track conversions for the short link.""" + archived: Optional[bool] = False + r"""Whether the short link is archived.""" + public_stats: Annotated[Optional[bool], pydantic.Field(alias="publicStats")] = False + r"""Whether the short link's stats are publicly accessible.""" + tag_id: Annotated[Optional[Nullable[str]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="tagId")] = None + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[BulkCreateLinksTagIds], pydantic.Field(alias="tagIds")] = None r"""The unique IDs of the tags assigned to the short link.""" - tag_names: Optional[BulkCreateLinksTagNames] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagNames'), 'exclude': lambda f: f is None }}) + tag_names: Annotated[Optional[BulkCreateLinksTagNames], pydantic.Field(alias="tagNames")] = None r"""The unique name of the tags assigned to the short link (case insensitive).""" - comments: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comments'), 'exclude': lambda f: f is RequestBody.UNSET }}) + comments: Optional[Nullable[str]] = None r"""The comments for the short link.""" - expires_at: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiresAt'), 'exclude': lambda f: f is RequestBody.UNSET }}) + expires_at: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiresAt")] = None r"""The date and time when the short link will expire at.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is RequestBody.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""The URL to redirect to when the short link has expired.""" - password: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is RequestBody.UNSET }}) + password: Optional[Nullable[str]] = None r"""The password required to 
access the destination URL of the short link.""" - proxy: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('proxy'), 'exclude': lambda f: f is None }}) + proxy: Optional[bool] = False r"""Whether the short link uses Custom Social Media Cards feature.""" - title: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title'), 'exclude': lambda f: f is RequestBody.UNSET }}) + title: Optional[Nullable[str]] = None r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - description: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description'), 'exclude': lambda f: f is RequestBody.UNSET }}) + description: Optional[Nullable[str]] = None r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - image: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image'), 'exclude': lambda f: f is RequestBody.UNSET }}) + image: Optional[Nullable[str]] = None r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - rewrite: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rewrite'), 'exclude': lambda f: f is None }}) + rewrite: Optional[bool] = False r"""Whether the short link uses link cloaking.""" - ios: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ios'), 'exclude': lambda f: f is RequestBody.UNSET }}) + ios: Optional[Nullable[str]] = None r"""The iOS destination URL for the short link for iOS device targeting.""" - android: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('android'), 'exclude': lambda f: f is RequestBody.UNSET }}) + android: Optional[Nullable[str]] = None r"""The Android destination URL for the short link for Android device targeting.""" - geo: Optional[components_linkgeotargeting.LinkGeoTargeting] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo'), 'exclude': lambda f: f is RequestBody.UNSET }}) + geo: Optional[Nullable[components_linkgeotargeting.LinkGeoTargeting]] = None r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" - do_index: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doIndex'), 'exclude': lambda f: f is None }}) + do_index: Annotated[Optional[bool], pydantic.Field(alias="doIndex")] = False r"""Allow search engines to index your short link. Defaults to `false` if not provided. 
Learn more: https://d.to/noindex""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["domain", "key", "externalId", "prefix", "trackConversion", "archived", "publicStats", "tagId", "tagIds", "tagNames", "comments", "expiresAt", "expiredUrl", "password", "proxy", "title", "description", "image", "rewrite", "ios", "android", "geo", "doIndex"] + nullable_fields = ["externalId", "tagId", "comments", "expiresAt", "expiredUrl", "password", "title", "description", "image", "ios", "android", "geo"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + + return m + + +BulkCreateLinksTagIdsTypedDict = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" BulkCreateLinksTagIds = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + + +BulkCreateLinksTagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + BulkCreateLinksTagNames = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + diff --git a/src/dub/models/operations/bulkupdatelinks.py b/src/dub/models/operations/bulkupdatelinks.py index b4ba1e2..a78fa98 100644 --- a/src/dub/models/operations/bulkupdatelinks.py +++ b/src/dub/models/operations/bulkupdatelinks.py @@ -1,72 +1,147 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
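Each request body in the hunks above now ships in two forms: a *TypedDict describing the plain-dict input shape and a pydantic model that validates it. A hedged sketch of validating a dict against the new RequestBody (the import follows the operations __all__ export list above; the values are placeholders):

# Illustrative only: camelCase keys match the pydantic aliases; tag IDs may be a single
# string or a list because BulkCreateLinksTagIds is Union[str, List[str]].
from dub.models.operations import RequestBody

body = RequestBody.model_validate({
    "url": "https://example.com",
    "tagIds": ["tag_abc"],
    "externalId": None,  # explicitly-set nullable field, so it survives serialization as null
})
print(body.model_dump(by_alias=True))  # unset nullable fields (comments, password, ...) are omitted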
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from ...models.components import linkgeotargeting as components_linkgeotargeting -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import List, Optional, Union - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Data: - UNSET='__SPEAKEASY_UNSET__' - url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url'), 'exclude': lambda f: f is None }}) +from dub.models.components import linkgeotargeting as components_linkgeotargeting +from dub.types import BaseModel, Nullable +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired + + +class DataTypedDict(TypedDict): + url: NotRequired[str] r"""The destination URL of the short link.""" - track_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trackConversion'), 'exclude': lambda f: f is None }}) + track_conversion: NotRequired[bool] r"""Whether to track conversions for the short link.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: NotRequired[bool] r"""Whether the short link is archived.""" - public_stats: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicStats'), 'exclude': lambda f: f is None }}) + public_stats: NotRequired[bool] r"""Whether the short link's stats are publicly accessible.""" - tag_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId'), 'exclude': lambda f: f is Data.UNSET }}) - r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. + tag_id: NotRequired[Nullable[str]] + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: NotRequired[BulkUpdateLinksTagIdsTypedDict] + r"""The unique IDs of the tags assigned to the short link.""" + tag_names: NotRequired[BulkUpdateLinksTagNamesTypedDict] + r"""The unique name of the tags assigned to the short link (case insensitive).""" + comments: NotRequired[Nullable[str]] + r"""The comments for the short link.""" + expires_at: NotRequired[Nullable[str]] + r"""The date and time when the short link will expire at.""" + expired_url: NotRequired[Nullable[str]] + r"""The URL to redirect to when the short link has expired.""" + password: NotRequired[Nullable[str]] + r"""The password required to access the destination URL of the short link.""" + proxy: NotRequired[bool] + r"""Whether the short link uses Custom Social Media Cards feature.""" + title: NotRequired[Nullable[str]] + r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + description: NotRequired[Nullable[str]] + r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + image: NotRequired[Nullable[str]] + r"""The image of the short link generated via `api.dub.co/metatags`. 
Will be used for Custom Social Media Cards if `proxy` is true.""" + rewrite: NotRequired[bool] + r"""Whether the short link uses link cloaking.""" + ios: NotRequired[Nullable[str]] + r"""The iOS destination URL for the short link for iOS device targeting.""" + android: NotRequired[Nullable[str]] + r"""The Android destination URL for the short link for Android device targeting.""" + geo: NotRequired[Nullable[components_linkgeotargeting.LinkGeoTargetingTypedDict]] + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" + do_index: NotRequired[bool] + r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ - tag_ids: Optional[BulkUpdateLinksTagIds] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagIds'), 'exclude': lambda f: f is None }}) +class Data(BaseModel): + url: Optional[str] = None + r"""The destination URL of the short link.""" + track_conversion: Annotated[Optional[bool], pydantic.Field(alias="trackConversion")] = False + r"""Whether to track conversions for the short link.""" + archived: Optional[bool] = False + r"""Whether the short link is archived.""" + public_stats: Annotated[Optional[bool], pydantic.Field(alias="publicStats")] = False + r"""Whether the short link's stats are publicly accessible.""" + tag_id: Annotated[Optional[Nullable[str]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="tagId")] = None + r"""The unique ID of the tag assigned to the short link. 
This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[BulkUpdateLinksTagIds], pydantic.Field(alias="tagIds")] = None r"""The unique IDs of the tags assigned to the short link.""" - tag_names: Optional[BulkUpdateLinksTagNames] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagNames'), 'exclude': lambda f: f is None }}) + tag_names: Annotated[Optional[BulkUpdateLinksTagNames], pydantic.Field(alias="tagNames")] = None r"""The unique name of the tags assigned to the short link (case insensitive).""" - comments: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comments'), 'exclude': lambda f: f is Data.UNSET }}) + comments: Optional[Nullable[str]] = None r"""The comments for the short link.""" - expires_at: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiresAt'), 'exclude': lambda f: f is Data.UNSET }}) + expires_at: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiresAt")] = None r"""The date and time when the short link will expire at.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is Data.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""The URL to redirect to when the short link has expired.""" - password: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is Data.UNSET }}) + password: Optional[Nullable[str]] = None r"""The password required to access the destination URL of the short link.""" - proxy: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('proxy'), 'exclude': lambda f: f is None }}) + proxy: Optional[bool] = False r"""Whether the short link uses Custom Social Media Cards feature.""" - title: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title'), 'exclude': lambda f: f is Data.UNSET }}) + title: Optional[Nullable[str]] = None r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - description: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description'), 'exclude': lambda f: f is Data.UNSET }}) + description: Optional[Nullable[str]] = None r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - image: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image'), 'exclude': lambda f: f is Data.UNSET }}) + image: Optional[Nullable[str]] = None r"""The image of the short link generated via `api.dub.co/metatags`. 
Will be used for Custom Social Media Cards if `proxy` is true.""" - rewrite: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rewrite'), 'exclude': lambda f: f is None }}) + rewrite: Optional[bool] = False r"""Whether the short link uses link cloaking.""" - ios: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ios'), 'exclude': lambda f: f is Data.UNSET }}) + ios: Optional[Nullable[str]] = None r"""The iOS destination URL for the short link for iOS device targeting.""" - android: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('android'), 'exclude': lambda f: f is Data.UNSET }}) + android: Optional[Nullable[str]] = None r"""The Android destination URL for the short link for Android device targeting.""" - geo: Optional[components_linkgeotargeting.LinkGeoTargeting] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo'), 'exclude': lambda f: f is Data.UNSET }}) + geo: Optional[Nullable[components_linkgeotargeting.LinkGeoTargeting]] = None r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" - do_index: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doIndex'), 'exclude': lambda f: f is None }}) + do_index: Annotated[Optional[bool], pydantic.Field(alias="doIndex")] = False r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["url", "trackConversion", "archived", "publicStats", "tagId", "tagIds", "tagNames", "comments", "expiresAt", "expiredUrl", "password", "proxy", "title", "description", "image", "rewrite", "ios", "android", "geo", "doIndex"] + nullable_fields = ["tagId", "comments", "expiresAt", "expiredUrl", "password", "title", "description", "image", "ios", "android", "geo"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class BulkUpdateLinksRequestBody: - link_ids: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('linkIds') }}) - data: Data = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data') }}) + return m + + +class BulkUpdateLinksRequestBodyTypedDict(TypedDict): + link_ids: List[str] + data: DataTypedDict + + +class BulkUpdateLinksRequestBody(BaseModel): + link_ids: Annotated[List[str], pydantic.Field(alias="linkIds")] + data: Data +BulkUpdateLinksTagIdsTypedDict = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + BulkUpdateLinksTagIds = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + + +BulkUpdateLinksTagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + BulkUpdateLinksTagNames = 
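The bulk-update payload keeps the same split: BulkUpdateLinksRequestBody carries the link IDs plus a nested Data model whose serializer mirrors the one above. A small construction sketch (IDs and URL are placeholders; both names are exported per the __all__ list above):

# Illustrative only.
from dub.models.operations import BulkUpdateLinksRequestBody, Data

req = BulkUpdateLinksRequestBody(
    linkIds=["link_1", "link_2"],  # alias for link_ids
    data=Data(url="https://example.com", archived=True),
)
print(req.model_dump(by_alias=True))  # the nested Data drops its unset nullable fields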
Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + diff --git a/src/dub/models/operations/createdomain.py b/src/dub/models/operations/createdomain.py index 35be167..e8681ce 100644 --- a/src/dub/models/operations/createdomain.py +++ b/src/dub/models/operations/createdomain.py @@ -1,31 +1,65 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from pydantic import model_serializer +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class CreateDomainGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class CreateDomainGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class CreateDomainGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + +class CreateDomainRequestBodyTypedDict(TypedDict): + slug: str + r"""Name of the domain.""" + expired_url: NotRequired[Nullable[str]] + r"""Redirect users to a specific URL when any link under this domain has expired.""" + archived: NotRequired[bool] + r"""Whether to archive this domain. `false` will unarchive a previously archived domain.""" + placeholder: NotRequired[Nullable[str]] + r"""Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened.""" + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class CreateDomainRequestBody: - UNSET='__SPEAKEASY_UNSET__' - slug: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slug') }}) +class CreateDomainRequestBody(BaseModel): + slug: str r"""Name of the domain.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is CreateDomainRequestBody.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""Redirect users to a specific URL when any link under this domain has expired.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: Optional[bool] = False r"""Whether to archive this domain. 
`false` will unarchive a previously archived domain.""" - placeholder: Optional[str] = dataclasses.field(default='https://dub.co/help/article/what-is-dub', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('placeholder'), 'exclude': lambda f: f is CreateDomainRequestBody.UNSET }}) + placeholder: Optional[Nullable[str]] = "https://dub.co/help/article/what-is-dub" r"""Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened.""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["expiredUrl", "archived", "placeholder"] + nullable_fields = ["expiredUrl", "placeholder"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/operations/createlink.py b/src/dub/models/operations/createlink.py index 0545618..0a057de 100644 --- a/src/dub/models/operations/createlink.py +++ b/src/dub/models/operations/createlink.py @@ -1,79 +1,162 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from ...models.components import linkgeotargeting as components_linkgeotargeting -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import List, Optional, Union +from dub.models.components import linkgeotargeting as components_linkgeotargeting +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class CreateLinkGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class CreateLinkGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class CreateLinkGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class CreateLinkRequestBody: - UNSET='__SPEAKEASY_UNSET__' - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class CreateLinkRequestBodyTypedDict(TypedDict): + url: str r"""The destination URL of the short link.""" - domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + domain: NotRequired[str] r"""The domain of the short link. 
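The `serialize_model` hook above is what lets the regenerated models tell "field never provided" apart from "field explicitly set to null": unset optional fields are dropped from the payload, while nullable fields assigned `None` are kept as explicit nulls. A minimal sketch of that rule using plain pydantic v2 and a hypothetical model (not the SDK's own `BaseModel`/`Nullable` types, and with the empty `null_default_fields` branch folded away):

import pydantic
from pydantic import BaseModel, model_serializer
from typing import Optional

class DomainBody(BaseModel):
    # Hypothetical stand-in for a generated request body (plain pydantic v2, not the SDK's BaseModel).
    slug: str
    expired_url: Optional[str] = pydantic.Field(default=None, alias="expiredUrl")  # nullable: explicit None is sent as null
    archived: Optional[bool] = False                                               # optional only: unset values are dropped

    @model_serializer(mode="wrap")
    def serialize_model(self, handler):
        optional_fields = ["expiredUrl", "archived"]
        nullable_fields = ["expiredUrl"]
        serialized = handler(self)
        m = {}
        for name, field in self.model_fields.items():
            key = field.alias or name
            val = serialized.get(key)
            if val is not None:
                m[key] = val                 # real values always pass through
            elif key in nullable_fields and name in self.__pydantic_fields_set__:
                m[key] = None                # the caller explicitly asked for null
            elif key not in optional_fields:
                m[key] = val                 # required fields are emitted even when None
        return m

print(DomainBody(slug="acme.link").model_dump(by_alias=True))
# expected: {'slug': 'acme.link', 'archived': False}
print(DomainBody(slug="acme.link", expiredUrl=None).model_dump(by_alias=True))
# expected: {'slug': 'acme.link', 'expiredUrl': None, 'archived': False}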
If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" - key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key'), 'exclude': lambda f: f is None }}) + key: NotRequired[str] r"""The short link slug. If not provided, a random 7-character slug will be generated.""" - external_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('externalId'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + external_id: NotRequired[Nullable[str]] r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" - prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) + prefix: NotRequired[str] r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided.""" - track_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trackConversion'), 'exclude': lambda f: f is None }}) + track_conversion: NotRequired[bool] r"""Whether to track conversions for the short link.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: NotRequired[bool] r"""Whether the short link is archived.""" - public_stats: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicStats'), 'exclude': lambda f: f is None }}) + public_stats: NotRequired[bool] r"""Whether the short link's stats are publicly accessible.""" - tag_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) - r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. + tag_id: NotRequired[Nullable[str]] + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: NotRequired[TagIdsTypedDict] + r"""The unique IDs of the tags assigned to the short link.""" + tag_names: NotRequired[TagNamesTypedDict] + r"""The unique name of the tags assigned to the short link (case insensitive).""" + comments: NotRequired[Nullable[str]] + r"""The comments for the short link.""" + expires_at: NotRequired[Nullable[str]] + r"""The date and time when the short link will expire at.""" + expired_url: NotRequired[Nullable[str]] + r"""The URL to redirect to when the short link has expired.""" + password: NotRequired[Nullable[str]] + r"""The password required to access the destination URL of the short link.""" + proxy: NotRequired[bool] + r"""Whether the short link uses Custom Social Media Cards feature.""" + title: NotRequired[Nullable[str]] + r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + description: NotRequired[Nullable[str]] + r"""The description of the short link generated via `api.dub.co/metatags`. 
Will be used for Custom Social Media Cards if `proxy` is true.""" + image: NotRequired[Nullable[str]] + r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + rewrite: NotRequired[bool] + r"""Whether the short link uses link cloaking.""" + ios: NotRequired[Nullable[str]] + r"""The iOS destination URL for the short link for iOS device targeting.""" + android: NotRequired[Nullable[str]] + r"""The Android destination URL for the short link for Android device targeting.""" + geo: NotRequired[Nullable[components_linkgeotargeting.LinkGeoTargetingTypedDict]] + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" + do_index: NotRequired[bool] + r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ - tag_ids: Optional[TagIds] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagIds'), 'exclude': lambda f: f is None }}) +class CreateLinkRequestBody(BaseModel): + url: str + r"""The destination URL of the short link.""" + domain: Optional[str] = None + r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" + key: Optional[str] = None + r"""The short link slug. If not provided, a random 7-character slug will be generated.""" + external_id: Annotated[Optional[Nullable[str]], pydantic.Field(alias="externalId")] = None + r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" + prefix: Optional[str] = None + r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided.""" + track_conversion: Annotated[Optional[bool], pydantic.Field(alias="trackConversion")] = False + r"""Whether to track conversions for the short link.""" + archived: Optional[bool] = False + r"""Whether the short link is archived.""" + public_stats: Annotated[Optional[bool], pydantic.Field(alias="publicStats")] = False + r"""Whether the short link's stats are publicly accessible.""" + tag_id: Annotated[Optional[Nullable[str]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="tagId")] = None + r"""The unique ID of the tag assigned to the short link. 
This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[TagIds], pydantic.Field(alias="tagIds")] = None r"""The unique IDs of the tags assigned to the short link.""" - tag_names: Optional[TagNames] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagNames'), 'exclude': lambda f: f is None }}) + tag_names: Annotated[Optional[TagNames], pydantic.Field(alias="tagNames")] = None r"""The unique name of the tags assigned to the short link (case insensitive).""" - comments: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comments'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + comments: Optional[Nullable[str]] = None r"""The comments for the short link.""" - expires_at: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiresAt'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + expires_at: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiresAt")] = None r"""The date and time when the short link will expire at.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""The URL to redirect to when the short link has expired.""" - password: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + password: Optional[Nullable[str]] = None r"""The password required to access the destination URL of the short link.""" - proxy: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('proxy'), 'exclude': lambda f: f is None }}) + proxy: Optional[bool] = False r"""Whether the short link uses Custom Social Media Cards feature.""" - title: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + title: Optional[Nullable[str]] = None r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - description: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + description: Optional[Nullable[str]] = None r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - image: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + image: Optional[Nullable[str]] = None r"""The image of the short link generated via `api.dub.co/metatags`. 
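Deprecated inputs such as `tagId` are now marked with `pydantic.Field(deprecated=...)` instead of a trailing docstring note. Assuming a pydantic release that supports the `deprecated` argument (2.7 or newer), reading such a field emits a `DeprecationWarning` at runtime; a small sketch with a made-up model, not the SDK's:

import warnings
from typing import Optional
import pydantic
from pydantic import BaseModel
from typing_extensions import Annotated

class LinkBody(BaseModel):
    # Hypothetical model showing only the deprecation mechanics.
    tag_id: Annotated[
        Optional[str],
        pydantic.Field(deprecated="use tagIds instead", alias="tagId"),
    ] = None

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    body = LinkBody(tagId="tag_123")
    _ = body.tag_id                 # attribute access is what triggers the warning
print(caught[-1].category)          # expected: <class 'DeprecationWarning'>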
Will be used for Custom Social Media Cards if `proxy` is true.""" - rewrite: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rewrite'), 'exclude': lambda f: f is None }}) + rewrite: Optional[bool] = False r"""Whether the short link uses link cloaking.""" - ios: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ios'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + ios: Optional[Nullable[str]] = None r"""The iOS destination URL for the short link for iOS device targeting.""" - android: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('android'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + android: Optional[Nullable[str]] = None r"""The Android destination URL for the short link for Android device targeting.""" - geo: Optional[components_linkgeotargeting.LinkGeoTargeting] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo'), 'exclude': lambda f: f is CreateLinkRequestBody.UNSET }}) + geo: Optional[Nullable[components_linkgeotargeting.LinkGeoTargeting]] = None r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" - do_index: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doIndex'), 'exclude': lambda f: f is None }}) + do_index: Annotated[Optional[bool], pydantic.Field(alias="doIndex")] = False r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["domain", "key", "externalId", "prefix", "trackConversion", "archived", "publicStats", "tagId", "tagIds", "tagNames", "comments", "expiresAt", "expiredUrl", "password", "proxy", "title", "description", "image", "rewrite", "ios", "android", "geo", "doIndex"] + nullable_fields = ["externalId", "tagId", "comments", "expiresAt", "expiredUrl", "password", "title", "description", "image", "ios", "android", "geo"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + + return m + + +TagIdsTypedDict = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" TagIds = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + + +TagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + TagNames = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + diff --git a/src/dub/models/operations/createtag.py b/src/dub/models/operations/createtag.py index ae0b816..a4a07e9 100644 --- a/src/dub/models/operations/createtag.py +++ b/src/dub/models/operations/createtag.py @@ -1,43 +1,47 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata from enum import Enum -from typing import Optional +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class CreateTagGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class CreateTagGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] - +class CreateTagGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + class Color(str, Enum): r"""The color of the tag. If not provided, a random color will be used from the list: red, yellow, green, blue, purple, pink, brown.""" - RED = 'red' - YELLOW = 'yellow' - GREEN = 'green' - BLUE = 'blue' - PURPLE = 'purple' - PINK = 'pink' - BROWN = 'brown' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class CreateTagRequestBody: - name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + RED = "red" + YELLOW = "yellow" + GREEN = "green" + BLUE = "blue" + PURPLE = "purple" + PINK = "pink" + BROWN = "brown" + + +class CreateTagRequestBodyTypedDict(TypedDict): + name: NotRequired[str] r"""The name of the tag to create.""" - color: Optional[Color] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color'), 'exclude': lambda f: f is None }}) + color: NotRequired[Color] r"""The color of the tag. If not provided, a random color will be used from the list: red, yellow, green, blue, purple, pink, brown.""" - tag: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tag'), 'exclude': lambda f: f is None }}) - r"""The name of the tag to create. - - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ + tag: NotRequired[str] + r"""The name of the tag to create.""" +class CreateTagRequestBody(BaseModel): + name: Optional[str] = None + r"""The name of the tag to create.""" + color: Optional[Color] = None + r"""The color of the tag. If not provided, a random color will be used from the list: red, yellow, green, blue, purple, pink, brown.""" + tag: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.")] = None + r"""The name of the tag to create.""" + diff --git a/src/dub/models/operations/deletedomain.py b/src/dub/models/operations/deletedomain.py index 0ea39fc..94b8687 100644 --- a/src/dub/models/operations/deletedomain.py +++ b/src/dub/models/operations/deletedomain.py @@ -1,33 +1,41 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class DeleteDomainGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class DeleteDomainGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class DeleteDomainGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclasses.dataclass -class DeleteDomainRequest: - slug: str = dataclasses.field(metadata={'path_param': { 'field_name': 'slug', 'style': 'simple', 'explode': False }}) +class DeleteDomainRequestTypedDict(TypedDict): + slug: str r"""The domain name.""" +class DeleteDomainRequest(BaseModel): + slug: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The domain name.""" + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DeleteDomainResponseBody: +class DeleteDomainResponseBodyTypedDict(TypedDict): r"""The domain was deleted.""" - slug: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slug') }}) + + slug: str r"""The domain name.""" +class DeleteDomainResponseBody(BaseModel): + r"""The domain was deleted.""" + + slug: str + r"""The domain name.""" + diff --git a/src/dub/models/operations/deletelink.py b/src/dub/models/operations/deletelink.py index 11e3b59..57fba6d 100644 --- a/src/dub/models/operations/deletelink.py +++ b/src/dub/models/operations/deletelink.py @@ -1,33 +1,41 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class DeleteLinkGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class DeleteLinkGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class DeleteLinkGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclasses.dataclass -class DeleteLinkRequest: - link_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'linkId', 'style': 'simple', 'explode': False }}) +class DeleteLinkRequestTypedDict(TypedDict): + link_id: str r"""The id of the link to delete. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`.""" +class DeleteLinkRequest(BaseModel): + link_id: Annotated[str, pydantic.Field(alias="linkId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The id of the link to delete. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`.""" + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DeleteLinkResponseBody: +class DeleteLinkResponseBodyTypedDict(TypedDict): r"""The deleted link""" - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) + + id: str r"""The ID of the link.""" +class DeleteLinkResponseBody(BaseModel): + r"""The deleted link""" + + id: str + r"""The ID of the link.""" + diff --git a/src/dub/models/operations/getlinkinfo.py b/src/dub/models/operations/getlinkinfo.py index b8dbefa..7856051 100644 --- a/src/dub/models/operations/getlinkinfo.py +++ b/src/dub/models/operations/getlinkinfo.py @@ -1,26 +1,37 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class GetLinkInfoGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class GetLinkInfoGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class GetLinkInfoGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclasses.dataclass -class GetLinkInfoRequest: - domain: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'domain', 'style': 'form', 'explode': True }}) - key: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'key', 'style': 'form', 'explode': True }}) +class GetLinkInfoRequestTypedDict(TypedDict): + domain: NotRequired[str] + key: NotRequired[str] r"""The key of the link to retrieve. E.g. for `d.to/github`, the key is `github`.""" - link_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'linkId', 'style': 'form', 'explode': True }}) + link_id: NotRequired[str] r"""The unique ID of the short link.""" - external_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'externalId', 'style': 'form', 'explode': True }}) + external_id: NotRequired[str] r"""This is the ID of the link in the your database. Must be prefixed with `ext_` when passed as a query parameter.""" +class GetLinkInfoRequest(BaseModel): + domain: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + key: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The key of the link to retrieve. E.g. for `d.to/github`, the key is `github`.""" + link_id: Annotated[Optional[str], pydantic.Field(alias="linkId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The unique ID of the short link.""" + external_id: Annotated[Optional[str], pydantic.Field(alias="externalId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""This is the ID of the link in the your database. Must be prefixed with `ext_` when passed as a query parameter.""" + diff --git a/src/dub/models/operations/getlinks.py b/src/dub/models/operations/getlinks.py index c7e7891..78ddb85 100644 --- a/src/dub/models/operations/getlinks.py +++ b/src/dub/models/operations/getlinks.py @@ -1,51 +1,87 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata from enum import Enum -from typing import List, Optional, Union +import pydantic +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class GetLinksGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class GetLinksGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] - +class GetLinksGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + class Sort(str, Enum): r"""The field to sort the links by. The default is `createdAt`, and sort order is always descending.""" - CREATED_AT = 'createdAt' - CLICKS = 'clicks' - LAST_CLICKED = 'lastClicked' + CREATED_AT = "createdAt" + CLICKS = "clicks" + LAST_CLICKED = "lastClicked" -@dataclasses.dataclass -class GetLinksRequest: - domain: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'domain', 'style': 'form', 'explode': True }}) +class GetLinksRequestTypedDict(TypedDict): + domain: NotRequired[str] r"""The domain to filter the links by. E.g. `ac.me`. If not provided, all links for the workspace will be returned.""" - tag_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagId', 'style': 'form', 'explode': True }}) + tag_id: NotRequired[str] r"""The tag ID to filter the links by. This field is deprecated – use `tagIds` instead.""" - tag_ids: Optional[QueryParamTagIds] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagIds', 'style': 'form', 'explode': True }}) + tag_ids: NotRequired[QueryParamTagIdsTypedDict] r"""The tag IDs to filter the links by.""" - tag_names: Optional[QueryParamTagNames] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagNames', 'style': 'form', 'explode': True }}) + tag_names: NotRequired[QueryParamTagNamesTypedDict] r"""The unique name of the tags assigned to the short link (case insensitive).""" - search: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'search', 'style': 'form', 'explode': True }}) + search: NotRequired[str] r"""The search term to filter the links by. The search term will be matched against the short link slug and the destination url.""" - user_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'userId', 'style': 'form', 'explode': True }}) + user_id: NotRequired[str] r"""The user ID to filter the links by.""" - show_archived: Optional[bool] = dataclasses.field(default=True, metadata={'query_param': { 'field_name': 'showArchived', 'style': 'form', 'explode': True }}) + show_archived: NotRequired[bool] r"""Whether to include archived links in the response. 
Defaults to `false` if not provided.""" - with_tags: Optional[bool] = dataclasses.field(default=True, metadata={'query_param': { 'field_name': 'withTags', 'style': 'form', 'explode': True }}) + with_tags: NotRequired[bool] r"""Whether to include tags in the response. Defaults to `false` if not provided.""" - sort: Optional[Sort] = dataclasses.field(default=Sort.CREATED_AT, metadata={'query_param': { 'field_name': 'sort', 'style': 'form', 'explode': True }}) + sort: NotRequired[Sort] r"""The field to sort the links by. The default is `createdAt`, and sort order is always descending.""" - page: Optional[int] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'page', 'style': 'form', 'explode': True }}) + page: NotRequired[int] r"""The page number for pagination (each page contains 100 links).""" +class GetLinksRequest(BaseModel): + domain: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The domain to filter the links by. E.g. `ac.me`. If not provided, all links for the workspace will be returned.""" + tag_id: Annotated[Optional[str], pydantic.Field(alias="tagId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The tag ID to filter the links by. This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[QueryParamTagIds], pydantic.Field(alias="tagIds"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The tag IDs to filter the links by.""" + tag_names: Annotated[Optional[QueryParamTagNames], pydantic.Field(alias="tagNames"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The unique name of the tags assigned to the short link (case insensitive).""" + search: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The search term to filter the links by. The search term will be matched against the short link slug and the destination url.""" + user_id: Annotated[Optional[str], pydantic.Field(alias="userId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The user ID to filter the links by.""" + show_archived: Annotated[Optional[bool], pydantic.Field(alias="showArchived"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = True + r"""Whether to include archived links in the response. Defaults to `false` if not provided.""" + with_tags: Annotated[Optional[bool], pydantic.Field(alias="withTags"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = True + r"""Whether to include tags in the response. Defaults to `false` if not provided.""" + sort: Annotated[Optional[Sort], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = Sort.CREATED_AT + r"""The field to sort the links by. 
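Throughout these request models the Python attribute keeps its snake_case name while `pydantic.Field(alias=...)` carries the camelCase wire name; the extra `FieldMetadata(query=QueryParamMetadata(...))` annotation is the SDK's own marker for how the value is encoded into the query string. The alias half of that is ordinary pydantic behaviour and can be sketched in isolation (hypothetical model and values):

import pydantic
from pydantic import BaseModel
from typing import Optional
from typing_extensions import Annotated

class LinksQuery(BaseModel):
    # Hypothetical stand-in; the real model also carries the SDK's query-param metadata.
    tag_id: Annotated[Optional[str], pydantic.Field(alias="tagId")] = None
    show_archived: Annotated[Optional[bool], pydantic.Field(alias="showArchived")] = True

q = LinksQuery(tagId="tag_123", showArchived=False)     # the aliases are the accepted input names here
print(q.tag_id)                                          # prints tag_123; snake_case access in Python
print(q.model_dump(by_alias=True, exclude_none=True))   # {'tagId': 'tag_123', 'showArchived': False}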
The default is `createdAt`, and sort order is always descending.""" + page: Annotated[Optional[int], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The page number for pagination (each page contains 100 links).""" + + +QueryParamTagIdsTypedDict = Union[str, List[str]] +r"""The tag IDs to filter the links by.""" + QueryParamTagIds = Union[str, List[str]] +r"""The tag IDs to filter the links by.""" + + +QueryParamTagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + QueryParamTagNames = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + diff --git a/src/dub/models/operations/getlinkscount.py b/src/dub/models/operations/getlinkscount.py index a3c7160..6c55d98 100644 --- a/src/dub/models/operations/getlinkscount.py +++ b/src/dub/models/operations/getlinkscount.py @@ -1,52 +1,92 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata from enum import Enum -from typing import List, Optional, Union +import pydantic +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class GetLinksCountGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class GetLinksCountGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] - +class GetLinksCountGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + class Two(str, Enum): - TAG_ID = 'tagId' + TAG_ID = "tagId" class One(str, Enum): - DOMAIN = 'domain' + DOMAIN = "domain" -@dataclasses.dataclass -class GetLinksCountRequest: - domain: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'domain', 'style': 'form', 'explode': True }}) +class GetLinksCountRequestTypedDict(TypedDict): + domain: NotRequired[str] r"""The domain to filter the links by. E.g. `ac.me`. If not provided, all links for the workspace will be returned.""" - tag_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagId', 'style': 'form', 'explode': True }}) + tag_id: NotRequired[str] r"""The tag ID to filter the links by. 
This field is deprecated – use `tagIds` instead.""" - tag_ids: Optional[GetLinksCountQueryParamTagIds] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagIds', 'style': 'form', 'explode': True }}) + tag_ids: NotRequired[GetLinksCountQueryParamTagIdsTypedDict] r"""The tag IDs to filter the links by.""" - tag_names: Optional[GetLinksCountQueryParamTagNames] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagNames', 'style': 'form', 'explode': True }}) + tag_names: NotRequired[GetLinksCountQueryParamTagNamesTypedDict] r"""The unique name of the tags assigned to the short link (case insensitive).""" - search: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'search', 'style': 'form', 'explode': True }}) + search: NotRequired[str] r"""The search term to filter the links by. The search term will be matched against the short link slug and the destination url.""" - user_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'userId', 'style': 'form', 'explode': True }}) + user_id: NotRequired[str] r"""The user ID to filter the links by.""" - show_archived: Optional[bool] = dataclasses.field(default=True, metadata={'query_param': { 'field_name': 'showArchived', 'style': 'form', 'explode': True }}) + show_archived: NotRequired[bool] r"""Whether to include archived links in the response. Defaults to `false` if not provided.""" - with_tags: Optional[bool] = dataclasses.field(default=True, metadata={'query_param': { 'field_name': 'withTags', 'style': 'form', 'explode': True }}) + with_tags: NotRequired[bool] r"""Whether to include tags in the response. Defaults to `false` if not provided.""" - group_by: Optional[GroupBy] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'groupBy', 'style': 'form', 'explode': True }}) + group_by: NotRequired[GroupByTypedDict] r"""The field to group the links by.""" +class GetLinksCountRequest(BaseModel): + domain: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The domain to filter the links by. E.g. `ac.me`. If not provided, all links for the workspace will be returned.""" + tag_id: Annotated[Optional[str], pydantic.Field(alias="tagId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The tag ID to filter the links by. This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[GetLinksCountQueryParamTagIds], pydantic.Field(alias="tagIds"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The tag IDs to filter the links by.""" + tag_names: Annotated[Optional[GetLinksCountQueryParamTagNames], pydantic.Field(alias="tagNames"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The unique name of the tags assigned to the short link (case insensitive).""" + search: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The search term to filter the links by. 
The search term will be matched against the short link slug and the destination url.""" + user_id: Annotated[Optional[str], pydantic.Field(alias="userId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The user ID to filter the links by.""" + show_archived: Annotated[Optional[bool], pydantic.Field(alias="showArchived"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = True + r"""Whether to include archived links in the response. Defaults to `false` if not provided.""" + with_tags: Annotated[Optional[bool], pydantic.Field(alias="withTags"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = True + r"""Whether to include tags in the response. Defaults to `false` if not provided.""" + group_by: Annotated[Optional[GroupBy], pydantic.Field(alias="groupBy"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""The field to group the links by.""" + + +GetLinksCountQueryParamTagIdsTypedDict = Union[str, List[str]] +r"""The tag IDs to filter the links by.""" + GetLinksCountQueryParamTagIds = Union[str, List[str]] +r"""The tag IDs to filter the links by.""" + + +GetLinksCountQueryParamTagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + GetLinksCountQueryParamTagNames = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + + +GroupByTypedDict = Union[One, Two] +r"""The field to group the links by.""" + GroupBy = Union[One, Two] +r"""The field to group the links by.""" + diff --git a/src/dub/models/operations/getmetatags.py b/src/dub/models/operations/getmetatags.py index bd74bab..f2f96fc 100644 --- a/src/dub/models/operations/getmetatags.py +++ b/src/dub/models/operations/getmetatags.py @@ -1,29 +1,66 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +from pydantic import model_serializer +from typing import TypedDict +from typing_extensions import Annotated -@dataclasses.dataclass -class GetMetatagsRequest: - url: str = dataclasses.field(metadata={'query_param': { 'field_name': 'url', 'style': 'form', 'explode': True }}) +class GetMetatagsRequestTypedDict(TypedDict): + url: str r"""The URL to retrieve metatags for.""" +class GetMetatagsRequest(BaseModel): + url: Annotated[str, FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] + r"""The URL to retrieve metatags for.""" + +class GetMetatagsResponseBodyTypedDict(TypedDict): + r"""The retrieved metatags""" + + title: Nullable[str] + r"""The meta title tag for the URL.""" + description: Nullable[str] + r"""The meta description tag for the URL.""" + image: Nullable[str] + r"""The OpenGraph image for the URL.""" + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class GetMetatagsResponseBody: +class GetMetatagsResponseBody(BaseModel): r"""The retrieved metatags""" - title: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title') }}) + + title: Nullable[str] r"""The meta title tag for the URL.""" - description: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description') }}) + description: Nullable[str] r"""The meta description tag for the URL.""" - image: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image') }}) + image: Nullable[str] r"""The OpenGraph image for the URL.""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = [] + nullable_fields = ["title", "description", "image"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/operations/getqrcode.py b/src/dub/models/operations/getqrcode.py index 568d47c..b06ee59 100644 --- a/src/dub/models/operations/getqrcode.py +++ b/src/dub/models/operations/getqrcode.py @@ -1,32 +1,48 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata from enum import Enum -from typing import Optional +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired class Level(str, Enum): r"""The level of error correction to use for the QR code. 
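Enum members such as `Level` here (and `Sort` above) are plain `str`-valued enums, so the models accept the raw wire strings and the members still compare equal to those strings. A quick standalone illustration with a hypothetical wrapper model in plain pydantic:

from enum import Enum
from typing import Optional
from pydantic import BaseModel

class Level(str, Enum):
    L = "L"
    M = "M"
    Q = "Q"
    H = "H"

class QROptions(BaseModel):
    # Hypothetical stand-in for the generated request model.
    level: Optional[Level] = Level.L

print(QROptions(level="M").level)   # prints Level.M; raw strings are coerced into the enum member
print(QROptions().level == "L")     # prints True; str-valued members compare equal to their value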
Defaults to `L` if not provided.""" - L = 'L' - M = 'M' - Q = 'Q' - H = 'H' + L = "L" + M = "M" + Q = "Q" + H = "H" -@dataclasses.dataclass -class GetQRCodeRequest: - url: str = dataclasses.field(metadata={'query_param': { 'field_name': 'url', 'style': 'form', 'explode': True }}) +class GetQRCodeRequestTypedDict(TypedDict): + url: str r"""The URL to generate a QR code for.""" - size: Optional[float] = dataclasses.field(default=600, metadata={'query_param': { 'field_name': 'size', 'style': 'form', 'explode': True }}) + size: NotRequired[float] r"""The size of the QR code in pixels. Defaults to `600` if not provided.""" - level: Optional[Level] = dataclasses.field(default=Level.L, metadata={'query_param': { 'field_name': 'level', 'style': 'form', 'explode': True }}) + level: NotRequired[Level] r"""The level of error correction to use for the QR code. Defaults to `L` if not provided.""" - fg_color: Optional[str] = dataclasses.field(default='#000000', metadata={'query_param': { 'field_name': 'fgColor', 'style': 'form', 'explode': True }}) + fg_color: NotRequired[str] r"""The foreground color of the QR code in hex format. Defaults to `#000000` if not provided.""" - bg_color: Optional[str] = dataclasses.field(default='#FFFFFF', metadata={'query_param': { 'field_name': 'bgColor', 'style': 'form', 'explode': True }}) + bg_color: NotRequired[str] r"""The background color of the QR code in hex format. Defaults to `#ffffff` if not provided.""" - include_margin: Optional[bool] = dataclasses.field(default=True, metadata={'query_param': { 'field_name': 'includeMargin', 'style': 'form', 'explode': True }}) + include_margin: NotRequired[bool] r"""Whether to include a margin around the QR code. Defaults to `false` if not provided.""" +class GetQRCodeRequest(BaseModel): + url: Annotated[str, FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] + r"""The URL to generate a QR code for.""" + size: Annotated[Optional[float], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = 600 + r"""The size of the QR code in pixels. Defaults to `600` if not provided.""" + level: Annotated[Optional[Level], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = Level.L + r"""The level of error correction to use for the QR code. Defaults to `L` if not provided.""" + fg_color: Annotated[Optional[str], pydantic.Field(alias="fgColor"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = "#000000" + r"""The foreground color of the QR code in hex format. Defaults to `#000000` if not provided.""" + bg_color: Annotated[Optional[str], pydantic.Field(alias="bgColor"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = "#FFFFFF" + r"""The background color of the QR code in hex format. Defaults to `#ffffff` if not provided.""" + include_margin: Annotated[Optional[bool], pydantic.Field(alias="includeMargin"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = True + r"""Whether to include a margin around the QR code. Defaults to `false` if not provided.""" + diff --git a/src/dub/models/operations/gettags.py b/src/dub/models/operations/gettags.py index 4daa33c..c9036b2 100644 --- a/src/dub/models/operations/gettags.py +++ b/src/dub/models/operations/gettags.py @@ -1,18 +1,25 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class GetTagsGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class GetTagsGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class GetTagsGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + +class GetTagsRequestTypedDict(TypedDict): + pass + -@dataclasses.dataclass -class GetTagsRequest: +class GetTagsRequest(BaseModel): pass + diff --git a/src/dub/models/operations/getworkspace.py b/src/dub/models/operations/getworkspace.py index 53dafe0..09ece37 100644 --- a/src/dub/models/operations/getworkspace.py +++ b/src/dub/models/operations/getworkspace.py @@ -1,12 +1,19 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses +from dub.types import BaseModel +from dub.utils import FieldMetadata, PathParamMetadata +import pydantic +from typing import TypedDict +from typing_extensions import Annotated -@dataclasses.dataclass -class GetWorkspaceRequest: - id_or_slug: str = dataclasses.field(metadata={'path_param': { 'field_name': 'idOrSlug', 'style': 'simple', 'explode': False }}) +class GetWorkspaceRequestTypedDict(TypedDict): + id_or_slug: str r"""The ID or slug of the workspace.""" +class GetWorkspaceRequest(BaseModel): + id_or_slug: Annotated[str, pydantic.Field(alias="idOrSlug"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The ID or slug of the workspace.""" + diff --git a/src/dub/models/operations/listdomains.py b/src/dub/models/operations/listdomains.py index c162fd2..b88dfc2 100644 --- a/src/dub/models/operations/listdomains.py +++ b/src/dub/models/operations/listdomains.py @@ -1,18 +1,25 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from typing import Optional +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class ListDomainsGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class ListDomainsGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class ListDomainsGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + +class ListDomainsRequestTypedDict(TypedDict): + pass + -@dataclasses.dataclass -class ListDomainsRequest: +class ListDomainsRequest(BaseModel): pass + diff --git a/src/dub/models/operations/retrieveanalytics.py b/src/dub/models/operations/retrieveanalytics.py index 0d91d47..3182992 100644 --- a/src/dub/models/operations/retrieveanalytics.py +++ b/src/dub/models/operations/retrieveanalytics.py @@ -1,128 +1,148 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from ...models.components import clicksbrowsers as components_clicksbrowsers -from ...models.components import clickscities as components_clickscities -from ...models.components import clickscount as components_clickscount -from ...models.components import clickscountries as components_clickscountries -from ...models.components import clicksdevices as components_clicksdevices -from ...models.components import clicksos as components_clicksos -from ...models.components import clicksreferers as components_clicksreferers -from ...models.components import clickstimeseries as components_clickstimeseries -from ...models.components import clickstoplinks as components_clickstoplinks -from ...models.components import clickstopurls as components_clickstopurls -from ...models.components import countrycode as components_countrycode -from ...models.components import leadsbrowsers as components_leadsbrowsers -from ...models.components import leadscities as components_leadscities -from ...models.components import leadscount as components_leadscount -from ...models.components import leadscountries as components_leadscountries -from ...models.components import leadsdevices as components_leadsdevices -from ...models.components import leadsos as components_leadsos -from ...models.components import leadsreferers as components_leadsreferers -from ...models.components import leadstimeseries as components_leadstimeseries -from ...models.components import leadstoplinks as components_leadstoplinks -from ...models.components import leadstopurls as components_leadstopurls -from ...models.components import salesbrowsers as components_salesbrowsers -from ...models.components import salescities as components_salescities -from ...models.components import salescount as components_salescount -from ...models.components import salescountries as components_salescountries -from ...models.components import salesdevices as components_salesdevices -from ...models.components 
import salesos as components_salesos -from ...models.components import salesreferers as components_salesreferers -from ...models.components import salestimeseries as components_salestimeseries -from ...models.components import salestoplinks as components_salestoplinks -from ...models.components import salestopurls as components_salestopurls +from dub.models.components import clicksbrowsers as components_clicksbrowsers, clickscities as components_clickscities, clickscount as components_clickscount, clickscountries as components_clickscountries, clicksdevices as components_clicksdevices, clicksos as components_clicksos, clicksreferers as components_clicksreferers, clickstimeseries as components_clickstimeseries, clickstoplinks as components_clickstoplinks, clickstopurls as components_clickstopurls, countrycode as components_countrycode, leadsbrowsers as components_leadsbrowsers, leadscities as components_leadscities, leadscount as components_leadscount, leadscountries as components_leadscountries, leadsdevices as components_leadsdevices, leadsos as components_leadsos, leadsreferers as components_leadsreferers, leadstimeseries as components_leadstimeseries, leadstoplinks as components_leadstoplinks, leadstopurls as components_leadstopurls, salesbrowsers as components_salesbrowsers, salescities as components_salescities, salescount as components_salescount, salescountries as components_salescountries, salesdevices as components_salesdevices, salesos as components_salesos, salesreferers as components_salesreferers, salestimeseries as components_salestimeseries, salestoplinks as components_salestoplinks, salestopurls as components_salestopurls +from dub.types import BaseModel +from dub.utils import FieldMetadata, QueryParamMetadata from enum import Enum -from typing import List, Optional, Union +import pydantic +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class RetrieveAnalyticsGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class RetrieveAnalyticsGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] - +class RetrieveAnalyticsGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + class Event(str, Enum): r"""The type of event to retrieve analytics for. Defaults to 'clicks'.""" - CLICKS = 'clicks' - LEADS = 'leads' - SALES = 'sales' - COMPOSITE = 'composite' + CLICKS = "clicks" + LEADS = "leads" + SALES = "sales" + COMPOSITE = "composite" class QueryParamGroupBy(str, Enum): r"""The parameter to group the analytics data points by. 
Defaults to 'count' if undefined.""" - COUNT = 'count' - TIMESERIES = 'timeseries' - COUNTRIES = 'countries' - CITIES = 'cities' - DEVICES = 'devices' - BROWSERS = 'browsers' - OS = 'os' - REFERERS = 'referers' - TOP_LINKS = 'top_links' - TOP_URLS = 'top_urls' - TRIGGER = 'trigger' + COUNT = "count" + TIMESERIES = "timeseries" + COUNTRIES = "countries" + CITIES = "cities" + DEVICES = "devices" + BROWSERS = "browsers" + OS = "os" + REFERERS = "referers" + TOP_LINKS = "top_links" + TOP_URLS = "top_urls" + TRIGGER = "trigger" class Interval(str, Enum): r"""The interval to retrieve analytics for. Takes precedence over start and end. If undefined, defaults to 24h.""" - TWENTY_FOURH = '24h' - SEVEND = '7d' - THIRTYD = '30d' - NINETYD = '90d' - YTD = 'ytd' - ONEY = '1y' - ALL = 'all' - ALL_UNFILTERED = 'all_unfiltered' - - -@dataclasses.dataclass -class RetrieveAnalyticsRequest: - event: Optional[Event] = dataclasses.field(default=Event.CLICKS, metadata={'query_param': { 'field_name': 'event', 'style': 'form', 'explode': True }}) + TWENTY_FOURH = "24h" + SEVEND = "7d" + THIRTYD = "30d" + NINETYD = "90d" + YTD = "ytd" + ONEY = "1y" + ALL = "all" + ALL_UNFILTERED = "all_unfiltered" + + +class RetrieveAnalyticsRequestTypedDict(TypedDict): + event: NotRequired[Event] + r"""The type of event to retrieve analytics for. Defaults to 'clicks'.""" + group_by: NotRequired[QueryParamGroupBy] + r"""The parameter to group the analytics data points by. Defaults to 'count' if undefined.""" + domain: NotRequired[str] + r"""The domain to filter analytics for.""" + key: NotRequired[str] + r"""The short link slug.""" + link_id: NotRequired[str] + r"""The unique ID of the short link on Dub.""" + external_id: NotRequired[str] + r"""This is the ID of the link in your database. Must be prefixed with 'ext_' when passed as a query parameter.""" + interval: NotRequired[Interval] + r"""The interval to retrieve analytics for. Takes precedence over start and end. If undefined, defaults to 24h.""" + start: NotRequired[str] + r"""The start date and time when to retrieve analytics from.""" + end: NotRequired[str] + r"""The end date and time when to retrieve analytics from. If not provided, defaults to the current date.""" + timezone: NotRequired[str] + r"""The IANA time zone code for aligning timeseries granularity (e.g. America/New_York). Defaults to UTC.""" + country: NotRequired[components_countrycode.CountryCode] + r"""The country to retrieve analytics for.""" + city: NotRequired[str] + r"""The city to retrieve analytics for.""" + device: NotRequired[str] + r"""The device to retrieve analytics for.""" + browser: NotRequired[str] + r"""The browser to retrieve analytics for.""" + os: NotRequired[str] + r"""The OS to retrieve analytics for.""" + referer: NotRequired[str] + r"""The referer to retrieve analytics for.""" + url: NotRequired[str] + r"""The URL to retrieve analytics for.""" + tag_id: NotRequired[str] + r"""The tag ID to retrieve analytics for.""" + qr: NotRequired[bool] + r"""Filter for QR code scans. If true, filter for QR codes only. If false, filter for links only. If undefined, return both.""" + root: NotRequired[bool] + r"""Filter for root domains. If true, filter for domains only. If false, filter for links only. If undefined, return both.""" + + +class RetrieveAnalyticsRequest(BaseModel): + event: Annotated[Optional[Event], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = Event.CLICKS r"""The type of event to retrieve analytics for. 
Defaults to 'clicks'.""" - group_by: Optional[QueryParamGroupBy] = dataclasses.field(default=QueryParamGroupBy.COUNT, metadata={'query_param': { 'field_name': 'groupBy', 'style': 'form', 'explode': True }}) + group_by: Annotated[Optional[QueryParamGroupBy], pydantic.Field(alias="groupBy"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = QueryParamGroupBy.COUNT r"""The parameter to group the analytics data points by. Defaults to 'count' if undefined.""" - domain: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'domain', 'style': 'form', 'explode': True }}) + domain: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The domain to filter analytics for.""" - key: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'key', 'style': 'form', 'explode': True }}) + key: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The short link slug.""" - link_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'linkId', 'style': 'form', 'explode': True }}) + link_id: Annotated[Optional[str], pydantic.Field(alias="linkId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The unique ID of the short link on Dub.""" - external_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'externalId', 'style': 'form', 'explode': True }}) + external_id: Annotated[Optional[str], pydantic.Field(alias="externalId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""This is the ID of the link in your database. Must be prefixed with 'ext_' when passed as a query parameter.""" - interval: Optional[Interval] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'interval', 'style': 'form', 'explode': True }}) + interval: Annotated[Optional[Interval], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The interval to retrieve analytics for. Takes precedence over start and end. If undefined, defaults to 24h.""" - start: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'start', 'style': 'form', 'explode': True }}) + start: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The start date and time when to retrieve analytics from.""" - end: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'end', 'style': 'form', 'explode': True }}) + end: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The end date and time when to retrieve analytics from. If not provided, defaults to the current date.""" - timezone: Optional[str] = dataclasses.field(default='UTC', metadata={'query_param': { 'field_name': 'timezone', 'style': 'form', 'explode': True }}) + timezone: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = "UTC" r"""The IANA time zone code for aligning timeseries granularity (e.g. America/New_York). 
Defaults to UTC.""" - country: Optional[components_countrycode.CountryCode] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'country', 'style': 'form', 'explode': True }}) + country: Annotated[Optional[components_countrycode.CountryCode], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The country to retrieve analytics for.""" - city: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'city', 'style': 'form', 'explode': True }}) + city: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The city to retrieve analytics for.""" - device: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'device', 'style': 'form', 'explode': True }}) + device: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The device to retrieve analytics for.""" - browser: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'browser', 'style': 'form', 'explode': True }}) + browser: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The browser to retrieve analytics for.""" - os: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'os', 'style': 'form', 'explode': True }}) + os: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The OS to retrieve analytics for.""" - referer: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'referer', 'style': 'form', 'explode': True }}) + referer: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The referer to retrieve analytics for.""" - url: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'url', 'style': 'form', 'explode': True }}) + url: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The URL to retrieve analytics for.""" - tag_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagId', 'style': 'form', 'explode': True }}) + tag_id: Annotated[Optional[str], pydantic.Field(alias="tagId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""The tag ID to retrieve analytics for.""" - qr: Optional[bool] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'qr', 'style': 'form', 'explode': True }}) + qr: Annotated[Optional[bool], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""Filter for QR code scans. If true, filter for QR codes only. If false, filter for links only. If undefined, return both.""" - root: Optional[bool] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'root', 'style': 'form', 'explode': True }}) + root: Annotated[Optional[bool], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None r"""Filter for root domains. If true, filter for domains only. If false, filter for links only. 
If undefined, return both.""" +RetrieveAnalyticsResponseBodyTypedDict = Union[components_leadscount.LeadsCountTypedDict, components_clickscount.ClicksCountTypedDict, components_salescount.SalesCountTypedDict, List[components_leadsdevices.LeadsDevicesTypedDict], List[components_leadsos.LeadsOSTypedDict], List[components_clicksbrowsers.ClicksBrowsersTypedDict], List[components_clicksos.ClicksOSTypedDict], List[components_clicksreferers.ClicksReferersTypedDict], List[components_clickstoplinks.ClicksTopLinksTypedDict], List[components_clickstopurls.ClicksTopUrlsTypedDict], List[components_clickscities.ClicksCitiesTypedDict], List[components_leadstimeseries.LeadsTimeseriesTypedDict], List[components_leadscountries.LeadsCountriesTypedDict], List[components_leadscities.LeadsCitiesTypedDict], List[components_clickscountries.ClicksCountriesTypedDict], List[components_leadsbrowsers.LeadsBrowsersTypedDict], List[components_clicksdevices.ClicksDevicesTypedDict], List[components_leadsreferers.LeadsReferersTypedDict], List[components_leadstoplinks.LeadsTopLinksTypedDict], List[components_leadstopurls.LeadsTopUrlsTypedDict], List[components_clickstimeseries.ClicksTimeseriesTypedDict], List[components_salestimeseries.SalesTimeseriesTypedDict], List[components_salescountries.SalesCountriesTypedDict], List[components_salescities.SalesCitiesTypedDict], List[components_salesdevices.SalesDevicesTypedDict], List[components_salesbrowsers.SalesBrowsersTypedDict], List[components_salesos.SalesOSTypedDict], List[components_salesreferers.SalesReferersTypedDict], List[components_salestoplinks.SalesTopLinksTypedDict], List[components_salestopurls.SalesTopUrlsTypedDict]] +r"""Analytics data""" + + +RetrieveAnalyticsResponseBody = Union[components_leadscount.LeadsCount, components_clickscount.ClicksCount, components_salescount.SalesCount, List[components_leadsdevices.LeadsDevices], List[components_leadsos.LeadsOS], List[components_clicksbrowsers.ClicksBrowsers], List[components_clicksos.ClicksOS], List[components_clicksreferers.ClicksReferers], List[components_clickstoplinks.ClicksTopLinks], List[components_clickstopurls.ClicksTopUrls], List[components_clickscities.ClicksCities], List[components_leadstimeseries.LeadsTimeseries], List[components_leadscountries.LeadsCountries], List[components_leadscities.LeadsCities], List[components_clickscountries.ClicksCountries], List[components_leadsbrowsers.LeadsBrowsers], List[components_clicksdevices.ClicksDevices], List[components_leadsreferers.LeadsReferers], List[components_leadstoplinks.LeadsTopLinks], List[components_leadstopurls.LeadsTopUrls], List[components_clickstimeseries.ClicksTimeseries], List[components_salestimeseries.SalesTimeseries], List[components_salescountries.SalesCountries], List[components_salescities.SalesCities], List[components_salesdevices.SalesDevices], List[components_salesbrowsers.SalesBrowsers], List[components_salesos.SalesOS], List[components_salesreferers.SalesReferers], List[components_salestoplinks.SalesTopLinks], List[components_salestopurls.SalesTopUrls]] +r"""Analytics data""" -RetrieveAnalyticsResponseBody = Union[components_clickscount.ClicksCount, List[components_clickstimeseries.ClicksTimeseries], List[components_clickscountries.ClicksCountries], List[components_clickscities.ClicksCities], List[components_clicksdevices.ClicksDevices], List[components_clicksbrowsers.ClicksBrowsers], List[components_clicksos.ClicksOS], List[components_clicksreferers.ClicksReferers], List[components_clickstoplinks.ClicksTopLinks], 
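Illustrative sketch (not part of the patch itself): the retrieveanalytics.py hunk above swaps the dataclass-based request for a Pydantic model plus a parallel TypedDict, and types the response body as a Union over count, timeseries and grouped shapes. Assuming these names import from the module shown and that the shared dub.types.BaseModel permits population by Python field name (typical for Speakeasy-generated models), a request could be built and serialized roughly like this:

    from dub.models.operations.retrieveanalytics import (
        Event,
        Interval,
        QueryParamGroupBy,
        RetrieveAnalyticsRequest,
    )

    req = RetrieveAnalyticsRequest(
        event=Event.CLICKS,
        group_by=QueryParamGroupBy.TIMESERIES,
        interval=Interval.THIRTYD,
        domain="dub.sh",  # example value
    )

    # by_alias emits the camelCase query names (groupBy, linkId, ...) declared via
    # pydantic.Field(alias=...); exclude_none drops the filters that were never set.
    print(req.model_dump(mode="json", by_alias=True, exclude_none=True))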
List[components_clickstopurls.ClicksTopUrls], components_leadscount.LeadsCount, List[components_leadstimeseries.LeadsTimeseries], List[components_leadscountries.LeadsCountries], List[components_leadscities.LeadsCities], List[components_leadsdevices.LeadsDevices], List[components_leadsbrowsers.LeadsBrowsers], List[components_leadsos.LeadsOS], List[components_leadsreferers.LeadsReferers], List[components_leadstoplinks.LeadsTopLinks], List[components_leadstopurls.LeadsTopUrls], components_salescount.SalesCount, List[components_salestimeseries.SalesTimeseries], List[components_salescountries.SalesCountries], List[components_salescities.SalesCities], List[components_salesdevices.SalesDevices], List[components_salesbrowsers.SalesBrowsers], List[components_salesos.SalesOS], List[components_salesreferers.SalesReferers], List[components_salestoplinks.SalesTopLinks], List[components_salestopurls.SalesTopUrls]] diff --git a/src/dub/models/operations/trackcustomer.py b/src/dub/models/operations/trackcustomer.py index eb91902..f45a5c5 100644 --- a/src/dub/models/operations/trackcustomer.py +++ b/src/dub/models/operations/trackcustomer.py @@ -1,42 +1,83 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from pydantic import model_serializer +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class TrackCustomerGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class TrackCustomerGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class TrackCustomerGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TrackCustomerRequestBody: - customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerId') }}) +class TrackCustomerRequestBodyTypedDict(TypedDict): + customer_id: str r"""This is the unique identifier for the customer in the client's app. 
This is used to track the customer's journey.""" - customer_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerName'), 'exclude': lambda f: f is None }}) + customer_name: NotRequired[str] r"""Name of the customer in the client's app.""" - customer_email: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerEmail'), 'exclude': lambda f: f is None }}) + customer_email: NotRequired[str] r"""Email of the customer in the client's app.""" - customer_avatar: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerAvatar'), 'exclude': lambda f: f is None }}) + customer_avatar: NotRequired[str] r"""Avatar of the customer in the client's app.""" +class TrackCustomerRequestBody(BaseModel): + customer_id: Annotated[str, pydantic.Field(alias="customerId")] + r"""This is the unique identifier for the customer in the client's app. This is used to track the customer's journey.""" + customer_name: Annotated[Optional[str], pydantic.Field(alias="customerName")] = None + r"""Name of the customer in the client's app.""" + customer_email: Annotated[Optional[str], pydantic.Field(alias="customerEmail")] = None + r"""Email of the customer in the client's app.""" + customer_avatar: Annotated[Optional[str], pydantic.Field(alias="customerAvatar")] = None + r"""Avatar of the customer in the client's app.""" + +class TrackCustomerResponseBodyTypedDict(TypedDict): + r"""A customer was tracked.""" + + customer_id: str + customer_name: Nullable[str] + customer_email: Nullable[str] + customer_avatar: Nullable[str] + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TrackCustomerResponseBody: +class TrackCustomerResponseBody(BaseModel): r"""A customer was tracked.""" - customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerId') }}) - customer_name: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerName') }}) - customer_email: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerEmail') }}) - customer_avatar: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerAvatar') }}) + customer_id: Annotated[str, pydantic.Field(alias="customerId")] + customer_name: Annotated[Nullable[str], pydantic.Field(alias="customerName")] + customer_email: Annotated[Nullable[str], pydantic.Field(alias="customerEmail")] + customer_avatar: Annotated[Nullable[str], pydantic.Field(alias="customerAvatar")] + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = [] + nullable_fields = ["customerName", "customerEmail", "customerAvatar"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/operations/tracklead.py b/src/dub/models/operations/tracklead.py index af36d07..393e329 100644 --- a/src/dub/models/operations/tracklead.py +++ 
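Illustrative sketch (not from the patch): the trackcustomer.py hunk above defines the new request body as a Pydantic model with camelCase aliases, so a payload might be assembled as below (the example values and the populate-by-name behaviour are assumptions carried over from the previous sketch):

    from dub.models.operations.trackcustomer import TrackCustomerRequestBody

    body = TrackCustomerRequestBody(
        customer_id="cus_123",  # example identifier
        customer_email="jane@example.com",
    )
    # Unset optional fields stay None, so exclude_none keeps the payload minimal.
    print(body.model_dump(by_alias=True, exclude_none=True))
    # {'customerId': 'cus_123', 'customerEmail': 'jane@example.com'}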
b/src/dub/models/operations/tracklead.py @@ -1,52 +1,126 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Any, Dict, Optional +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from pydantic import model_serializer +from typing import Any, Dict, Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class TrackLeadGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class TrackLeadGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class TrackLeadGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + +class TrackLeadRequestBodyTypedDict(TypedDict): + click_id: str + r"""The ID of the click in Dub. You can read this value from `dclid` cookie.""" + event_name: str + r"""The name of the event to track.""" + customer_id: str + r"""This is the unique identifier for the customer in the client's app. This is used to track the customer's journey.""" + customer_name: NotRequired[Nullable[str]] + r"""Name of the customer in the client's app.""" + customer_email: NotRequired[Nullable[str]] + r"""Email of the customer in the client's app.""" + customer_avatar: NotRequired[Nullable[str]] + r"""Avatar of the customer in the client's app.""" + metadata: NotRequired[Nullable[Dict[str, Any]]] + r"""Additional metadata to be stored with the lead event""" + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TrackLeadRequestBody: - UNSET='__SPEAKEASY_UNSET__' - click_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clickId') }}) +class TrackLeadRequestBody(BaseModel): + click_id: Annotated[str, pydantic.Field(alias="clickId")] r"""The ID of the click in Dub. You can read this value from `dclid` cookie.""" - event_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('eventName') }}) + event_name: Annotated[str, pydantic.Field(alias="eventName")] r"""The name of the event to track.""" - customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerId') }}) + customer_id: Annotated[str, pydantic.Field(alias="customerId")] r"""This is the unique identifier for the customer in the client's app. 
This is used to track the customer's journey.""" - customer_name: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerName'), 'exclude': lambda f: f is TrackLeadRequestBody.UNSET }}) + customer_name: Annotated[Optional[Nullable[str]], pydantic.Field(alias="customerName")] = None r"""Name of the customer in the client's app.""" - customer_email: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerEmail'), 'exclude': lambda f: f is TrackLeadRequestBody.UNSET }}) + customer_email: Annotated[Optional[Nullable[str]], pydantic.Field(alias="customerEmail")] = None r"""Email of the customer in the client's app.""" - customer_avatar: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerAvatar'), 'exclude': lambda f: f is TrackLeadRequestBody.UNSET }}) + customer_avatar: Annotated[Optional[Nullable[str]], pydantic.Field(alias="customerAvatar")] = None r"""Avatar of the customer in the client's app.""" - metadata: Optional[Dict[str, Any]] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata'), 'exclude': lambda f: f is TrackLeadRequestBody.UNSET }}) + metadata: Optional[Nullable[Dict[str, Any]]] = None r"""Additional metadata to be stored with the lead event""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["customerName", "customerEmail", "customerAvatar", "metadata"] + nullable_fields = ["customerName", "customerEmail", "customerAvatar", "metadata"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TrackLeadResponseBody: + return m + + +class TrackLeadResponseBodyTypedDict(TypedDict): + r"""A lead was tracked.""" + + click_id: str + event_name: str + customer_id: str + customer_name: Nullable[str] + customer_email: Nullable[str] + customer_avatar: Nullable[str] + metadata: NotRequired[Dict[str, Any]] + + +class TrackLeadResponseBody(BaseModel): r"""A lead was tracked.""" - click_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clickId') }}) - event_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('eventName') }}) - customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerId') }}) - customer_name: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerName') }}) - customer_email: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerEmail') }}) - customer_avatar: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerAvatar') }}) - metadata: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata'), 'exclude': lambda f: f is None }}) + click_id: 
Annotated[str, pydantic.Field(alias="clickId")] + event_name: Annotated[str, pydantic.Field(alias="eventName")] + customer_id: Annotated[str, pydantic.Field(alias="customerId")] + customer_name: Annotated[Nullable[str], pydantic.Field(alias="customerName")] + customer_email: Annotated[Nullable[str], pydantic.Field(alias="customerEmail")] + customer_avatar: Annotated[Nullable[str], pydantic.Field(alias="customerAvatar")] + metadata: Optional[Dict[str, Any]] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["metadata"] + nullable_fields = ["customerName", "customerEmail", "customerAvatar"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/operations/tracksale.py b/src/dub/models/operations/tracksale.py index 9641384..d2dc9a7 100644 --- a/src/dub/models/operations/tracksale.py +++ b/src/dub/models/operations/tracksale.py @@ -1,60 +1,134 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata from enum import Enum -from typing import Any, Dict, Optional +import pydantic +from pydantic import model_serializer +from typing import Any, Dict, Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class TrackSaleGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class TrackSaleGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] - +class TrackSaleGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + class PaymentProcessor(str, Enum): r"""The payment processor via which the sale was made.""" - STRIPE = 'stripe' - SHOPIFY = 'shopify' - PADDLE = 'paddle' + STRIPE = "stripe" + SHOPIFY = "shopify" + PADDLE = "paddle" + +class TrackSaleRequestBodyTypedDict(TypedDict): + customer_id: str + r"""This is the unique identifier for the customer in the client's app. This is used to track the customer's journey.""" + amount: int + r"""The amount of the sale. Should be passed in cents.""" + payment_processor: PaymentProcessor + r"""The payment processor via which the sale was made.""" + event_name: NotRequired[str] + r"""The name of the sale event. It can be used to track different types of event for example 'Purchase', 'Upgrade', 'Payment', etc.""" + invoice_id: NotRequired[Nullable[str]] + r"""The invoice ID of the sale.""" + currency: NotRequired[str] + r"""The currency of the sale. 
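Illustrative sketch (not from the patch): the wrap-mode serializer added to the tracklead.py models above distinguishes nullable fields that were explicitly set to None (emitted as null) from fields that were never set (omitted). Under the same populate-by-name assumption as in the earlier sketches:

    from dub.models.operations.tracklead import TrackLeadRequestBody

    lead = TrackLeadRequestBody(
        click_id="dclid_abc",  # example value read from the dclid cookie
        event_name="Sign Up",
        customer_id="cus_123",
        customer_name=None,    # explicitly set, so it should serialize as null
    )
    # customerName appears with a None value; customerEmail, customerAvatar and
    # metadata were never set, so the serializer above drops them from the payload.
    print(lead.model_dump(by_alias=True))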
Accepts ISO 4217 currency codes.""" + metadata: NotRequired[Nullable[Dict[str, Any]]] + r"""Additional metadata to be stored with the sale event.""" + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TrackSaleRequestBody: - UNSET='__SPEAKEASY_UNSET__' - customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerId') }}) +class TrackSaleRequestBody(BaseModel): + customer_id: Annotated[str, pydantic.Field(alias="customerId")] r"""This is the unique identifier for the customer in the client's app. This is used to track the customer's journey.""" - amount: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) + amount: int r"""The amount of the sale. Should be passed in cents.""" - payment_processor: PaymentProcessor = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('paymentProcessor') }}) + payment_processor: Annotated[PaymentProcessor, pydantic.Field(alias="paymentProcessor")] r"""The payment processor via which the sale was made.""" - event_name: Optional[str] = dataclasses.field(default='Purchase', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('eventName'), 'exclude': lambda f: f is None }}) + event_name: Annotated[Optional[str], pydantic.Field(alias="eventName")] = "Purchase" r"""The name of the sale event. It can be used to track different types of event for example 'Purchase', 'Upgrade', 'Payment', etc.""" - invoice_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('invoiceId'), 'exclude': lambda f: f is TrackSaleRequestBody.UNSET }}) + invoice_id: Annotated[Optional[Nullable[str]], pydantic.Field(alias="invoiceId")] = None r"""The invoice ID of the sale.""" - currency: Optional[str] = dataclasses.field(default='usd', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('currency'), 'exclude': lambda f: f is None }}) + currency: Optional[str] = "usd" r"""The currency of the sale. 
Accepts ISO 4217 currency codes.""" - metadata: Optional[Dict[str, Any]] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata'), 'exclude': lambda f: f is TrackSaleRequestBody.UNSET }}) + metadata: Optional[Nullable[Dict[str, Any]]] = None r"""Additional metadata to be stored with the sale event.""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["eventName", "invoiceId", "currency", "metadata"] + nullable_fields = ["invoiceId", "metadata"] + null_default_fields = ["invoiceId"] + + serialized = handler(self) + m = {} + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class TrackSaleResponseBody: + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + + return m + + +class TrackSaleResponseBodyTypedDict(TypedDict): + r"""A sale was tracked.""" + + event_name: str + customer_id: str + amount: float + payment_processor: str + invoice_id: Nullable[str] + currency: str + metadata: Nullable[Dict[str, Any]] + + +class TrackSaleResponseBody(BaseModel): r"""A sale was tracked.""" - event_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('eventName') }}) - customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customerId') }}) - amount: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('amount') }}) - payment_processor: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('paymentProcessor') }}) - invoice_id: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('invoiceId') }}) - currency: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('currency') }}) - metadata: Optional[Dict[str, Any]] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata') }}) + event_name: Annotated[str, pydantic.Field(alias="eventName")] + customer_id: Annotated[str, pydantic.Field(alias="customerId")] + amount: float + payment_processor: Annotated[str, pydantic.Field(alias="paymentProcessor")] + invoice_id: Annotated[Nullable[str], pydantic.Field(alias="invoiceId")] + currency: str + metadata: Nullable[Dict[str, Any]] + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = [] + nullable_fields = ["invoiceId", "metadata"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + return m + diff --git a/src/dub/models/operations/updatedomain.py b/src/dub/models/operations/updatedomain.py index c6d4aa7..2c169a1 100644 --- a/src/dub/models/operations/updatedomain.py +++ b/src/dub/models/operations/updatedomain.py @@ -1,40 +1,77 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
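Illustrative sketch (not from the patch): TrackSaleRequestBody above keeps the "Purchase" and "usd" defaults and lists invoiceId under null_default_fields, so an unset invoice ID still serializes as an explicit null. Assuming the same construction conventions:

    from dub.models.operations.tracksale import PaymentProcessor, TrackSaleRequestBody

    sale = TrackSaleRequestBody(
        customer_id="cus_123",
        amount=4999,  # the amount is passed in cents
        payment_processor=PaymentProcessor.STRIPE,
    )
    # Expect eventName="Purchase", currency="usd" and invoiceId=None in the output,
    # while the unset metadata field is omitted by the serializer above.
    print(sale.model_dump(mode="json", by_alias=True))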
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import Optional +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata, RequestMetadata +import pydantic +from pydantic import model_serializer +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class UpdateDomainGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class UpdateDomainGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class UpdateDomainGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + +class UpdateDomainRequestBodyTypedDict(TypedDict): + slug: NotRequired[str] + r"""Name of the domain.""" + expired_url: NotRequired[Nullable[str]] + r"""Redirect users to a specific URL when any link under this domain has expired.""" + archived: NotRequired[bool] + r"""Whether to archive this domain. `false` will unarchive a previously archived domain.""" + placeholder: NotRequired[Nullable[str]] + r"""Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened.""" + -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UpdateDomainRequestBody: - UNSET='__SPEAKEASY_UNSET__' - slug: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slug'), 'exclude': lambda f: f is None }}) +class UpdateDomainRequestBody(BaseModel): + slug: Optional[str] = None r"""Name of the domain.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is UpdateDomainRequestBody.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""Redirect users to a specific URL when any link under this domain has expired.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: Optional[bool] = False r"""Whether to archive this domain. 
`false` will unarchive a previously archived domain.""" - placeholder: Optional[str] = dataclasses.field(default='https://dub.co/help/article/what-is-dub', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('placeholder'), 'exclude': lambda f: f is UpdateDomainRequestBody.UNSET }}) + placeholder: Optional[Nullable[str]] = "https://dub.co/help/article/what-is-dub" r"""Provide context to your teammates in the link creation modal by showing them an example of a link to be shortened.""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["slug", "expiredUrl", "archived", "placeholder"] + nullable_fields = ["expiredUrl", "placeholder"] + null_default_fields = [] + serialized = handler(self) + m = {} -@dataclasses.dataclass -class UpdateDomainRequest: - slug: str = dataclasses.field(metadata={'path_param': { 'field_name': 'slug', 'style': 'simple', 'explode': False }}) + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + + return m + + +class UpdateDomainRequestTypedDict(TypedDict): + slug: str r"""The domain name.""" - request_body: Optional[UpdateDomainRequestBody] = dataclasses.field(default=None, metadata={'request': { 'media_type': 'application/json' }}) + request_body: NotRequired[UpdateDomainRequestBodyTypedDict] +class UpdateDomainRequest(BaseModel): + slug: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The domain name.""" + request_body: Annotated[Optional[UpdateDomainRequestBody], FieldMetadata(request=RequestMetadata(media_type="application/json"))] = None + diff --git a/src/dub/models/operations/updatelink.py b/src/dub/models/operations/updatelink.py index 46078d1..51f6b9c 100644 --- a/src/dub/models/operations/updatelink.py +++ b/src/dub/models/operations/updatelink.py @@ -1,88 +1,172 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
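Illustrative sketch (not from the patch): updatedomain.py now models the operation as a path parameter plus an optional JSON body, both as Pydantic models. A partial update might look like this (example values only):

    from dub.models.operations.updatedomain import (
        UpdateDomainRequest,
        UpdateDomainRequestBody,
    )

    req = UpdateDomainRequest(
        slug="acme.link",  # example domain
        request_body=UpdateDomainRequestBody(expired_url="https://acme.com/expired"),
    )
    # expiredUrl is included; archived keeps its False default and placeholder keeps
    # the documented default URL.
    print(req.request_body.model_dump(by_alias=True))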
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from ...models.components import linkgeotargeting as components_linkgeotargeting -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import List, Optional, Union +from dub.models.components import linkgeotargeting as components_linkgeotargeting +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata, RequestMetadata +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class UpdateLinkGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class UpdateLinkGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class UpdateLinkGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UpdateLinkRequestBody: - UNSET='__SPEAKEASY_UNSET__' - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class UpdateLinkRequestBodyTypedDict(TypedDict): + url: NotRequired[str] r"""The destination URL of the short link.""" - domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + domain: NotRequired[str] r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" - key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key'), 'exclude': lambda f: f is None }}) + key: NotRequired[str] r"""The short link slug. If not provided, a random 7-character slug will be generated.""" - external_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('externalId'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + external_id: NotRequired[Nullable[str]] r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" - prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) + prefix: NotRequired[str] r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). 
Will be ignored if `key` is provided.""" - track_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trackConversion'), 'exclude': lambda f: f is None }}) + track_conversion: NotRequired[bool] r"""Whether to track conversions for the short link.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: NotRequired[bool] r"""Whether the short link is archived.""" - public_stats: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicStats'), 'exclude': lambda f: f is None }}) + public_stats: NotRequired[bool] r"""Whether the short link's stats are publicly accessible.""" - tag_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) - r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. + tag_id: NotRequired[Nullable[str]] + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: NotRequired[UpdateLinkTagIdsTypedDict] + r"""The unique IDs of the tags assigned to the short link.""" + tag_names: NotRequired[UpdateLinkTagNamesTypedDict] + r"""The unique name of the tags assigned to the short link (case insensitive).""" + comments: NotRequired[Nullable[str]] + r"""The comments for the short link.""" + expires_at: NotRequired[Nullable[str]] + r"""The date and time when the short link will expire at.""" + expired_url: NotRequired[Nullable[str]] + r"""The URL to redirect to when the short link has expired.""" + password: NotRequired[Nullable[str]] + r"""The password required to access the destination URL of the short link.""" + proxy: NotRequired[bool] + r"""Whether the short link uses Custom Social Media Cards feature.""" + title: NotRequired[Nullable[str]] + r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + description: NotRequired[Nullable[str]] + r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + image: NotRequired[Nullable[str]] + r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + rewrite: NotRequired[bool] + r"""Whether the short link uses link cloaking.""" + ios: NotRequired[Nullable[str]] + r"""The iOS destination URL for the short link for iOS device targeting.""" + android: NotRequired[Nullable[str]] + r"""The Android destination URL for the short link for Android device targeting.""" + geo: NotRequired[Nullable[components_linkgeotargeting.LinkGeoTargetingTypedDict]] + do_index: NotRequired[bool] + r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. 
- """ - tag_ids: Optional[UpdateLinkTagIds] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagIds'), 'exclude': lambda f: f is None }}) +class UpdateLinkRequestBody(BaseModel): + url: Optional[str] = None + r"""The destination URL of the short link.""" + domain: Optional[str] = None + r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" + key: Optional[str] = None + r"""The short link slug. If not provided, a random 7-character slug will be generated.""" + external_id: Annotated[Optional[Nullable[str]], pydantic.Field(alias="externalId")] = None + r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" + prefix: Optional[str] = None + r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided.""" + track_conversion: Annotated[Optional[bool], pydantic.Field(alias="trackConversion")] = False + r"""Whether to track conversions for the short link.""" + archived: Optional[bool] = False + r"""Whether the short link is archived.""" + public_stats: Annotated[Optional[bool], pydantic.Field(alias="publicStats")] = False + r"""Whether the short link's stats are publicly accessible.""" + tag_id: Annotated[Optional[Nullable[str]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="tagId")] = None + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[UpdateLinkTagIds], pydantic.Field(alias="tagIds")] = None r"""The unique IDs of the tags assigned to the short link.""" - tag_names: Optional[UpdateLinkTagNames] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagNames'), 'exclude': lambda f: f is None }}) + tag_names: Annotated[Optional[UpdateLinkTagNames], pydantic.Field(alias="tagNames")] = None r"""The unique name of the tags assigned to the short link (case insensitive).""" - comments: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comments'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + comments: Optional[Nullable[str]] = None r"""The comments for the short link.""" - expires_at: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiresAt'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + expires_at: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiresAt")] = None r"""The date and time when the short link will expire at.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""The URL to redirect to when the short link has expired.""" - password: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + password: 
Optional[Nullable[str]] = None r"""The password required to access the destination URL of the short link.""" - proxy: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('proxy'), 'exclude': lambda f: f is None }}) + proxy: Optional[bool] = False r"""Whether the short link uses Custom Social Media Cards feature.""" - title: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + title: Optional[Nullable[str]] = None r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - description: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + description: Optional[Nullable[str]] = None r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - image: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + image: Optional[Nullable[str]] = None r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - rewrite: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rewrite'), 'exclude': lambda f: f is None }}) + rewrite: Optional[bool] = False r"""Whether the short link uses link cloaking.""" - ios: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ios'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + ios: Optional[Nullable[str]] = None r"""The iOS destination URL for the short link for iOS device targeting.""" - android: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('android'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) + android: Optional[Nullable[str]] = None r"""The Android destination URL for the short link for Android device targeting.""" - geo: Optional[components_linkgeotargeting.LinkGeoTargeting] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo'), 'exclude': lambda f: f is UpdateLinkRequestBody.UNSET }}) - r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" - do_index: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doIndex'), 'exclude': lambda f: f is None }}) + geo: Optional[Nullable[components_linkgeotargeting.LinkGeoTargeting]] = None + do_index: Annotated[Optional[bool], pydantic.Field(alias="doIndex")] = False r"""Allow search engines to index your short link. Defaults to `false` if not provided. 
Learn more: https://d.to/noindex""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["url", "domain", "key", "externalId", "prefix", "trackConversion", "archived", "publicStats", "tagId", "tagIds", "tagNames", "comments", "expiresAt", "expiredUrl", "password", "proxy", "title", "description", "image", "rewrite", "ios", "android", "geo", "doIndex"] + nullable_fields = ["externalId", "tagId", "comments", "expiresAt", "expiredUrl", "password", "title", "description", "image", "ios", "android", "geo"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val -@dataclasses.dataclass -class UpdateLinkRequest: - link_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'linkId', 'style': 'simple', 'explode': False }}) + return m + + +class UpdateLinkRequestTypedDict(TypedDict): + link_id: str r"""The id of the link to update. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`.""" - request_body: Optional[UpdateLinkRequestBody] = dataclasses.field(default=None, metadata={'request': { 'media_type': 'application/json' }}) + request_body: NotRequired[UpdateLinkRequestBodyTypedDict] +class UpdateLinkRequest(BaseModel): + link_id: Annotated[str, pydantic.Field(alias="linkId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The id of the link to update. You may use either `linkId` (obtained via `/links/info` endpoint) or `externalId` prefixed with `ext_`.""" + request_body: Annotated[Optional[UpdateLinkRequestBody], FieldMetadata(request=RequestMetadata(media_type="application/json"))] = None + + +UpdateLinkTagIdsTypedDict = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + UpdateLinkTagIds = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + + +UpdateLinkTagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + UpdateLinkTagNames = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + diff --git a/src/dub/models/operations/updatetag.py b/src/dub/models/operations/updatetag.py index eaa15ea..14cb5b4 100644 --- a/src/dub/models/operations/updatetag.py +++ b/src/dub/models/operations/updatetag.py @@ -1,52 +1,59 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
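Illustrative sketch (not from the patch): in the regenerated updatelink.py above, url becomes optional and tagIds/tagNames are typed as Union[str, List[str]], so partial updates can be expressed directly on the model (the identifier below is hypothetical):

    from dub.models.operations.updatelink import UpdateLinkRequest, UpdateLinkRequestBody

    req = UpdateLinkRequest(
        link_id="link_abc123",  # hypothetical linkId; an ext_-prefixed externalId also works per the docstring
        request_body=UpdateLinkRequestBody(
            url="https://example.com/new-destination",
            tag_ids=["tag_1", "tag_2"],
            comments=None,  # nullable and explicitly set, so serialized as null
        ),
    )
    print(req.request_body.model_dump(by_alias=True))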
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from dub import utils +from dub.types import BaseModel +from dub.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata, RequestMetadata from enum import Enum -from typing import Optional +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class UpdateTagGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class UpdateTagGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] - +class UpdateTagGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + class UpdateTagColor(str, Enum): r"""The color of the tag. If not provided, a random color will be used from the list: red, yellow, green, blue, purple, pink, brown.""" - RED = 'red' - YELLOW = 'yellow' - GREEN = 'green' - BLUE = 'blue' - PURPLE = 'purple' - PINK = 'pink' - BROWN = 'brown' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UpdateTagRequestBody: - name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + RED = "red" + YELLOW = "yellow" + GREEN = "green" + BLUE = "blue" + PURPLE = "purple" + PINK = "pink" + BROWN = "brown" + + +class UpdateTagRequestBodyTypedDict(TypedDict): + name: NotRequired[str] r"""The name of the tag to create.""" - color: Optional[UpdateTagColor] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color'), 'exclude': lambda f: f is None }}) + color: NotRequired[UpdateTagColor] r"""The color of the tag. If not provided, a random color will be used from the list: red, yellow, green, blue, purple, pink, brown.""" - tag: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tag'), 'exclude': lambda f: f is None }}) - r"""The name of the tag to create. - - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. - """ + tag: NotRequired[str] + r"""The name of the tag to create.""" +class UpdateTagRequestBody(BaseModel): + name: Optional[str] = None + r"""The name of the tag to create.""" + color: Optional[UpdateTagColor] = None + r"""The color of the tag. 
If not provided, a random color will be used from the list: red, yellow, green, blue, purple, pink, brown.""" + tag: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.")] = None + r"""The name of the tag to create.""" + - -@dataclasses.dataclass -class UpdateTagRequest: - id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'id', 'style': 'simple', 'explode': False }}) - r"""The ID of the tag""" - request_body: Optional[UpdateTagRequestBody] = dataclasses.field(default=None, metadata={'request': { 'media_type': 'application/json' }}) +class UpdateTagRequestTypedDict(TypedDict): + id: str + r"""The ID of the tag to update.""" + request_body: NotRequired[UpdateTagRequestBodyTypedDict] +class UpdateTagRequest(BaseModel): + id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The ID of the tag to update.""" + request_body: Annotated[Optional[UpdateTagRequestBody], FieldMetadata(request=RequestMetadata(media_type="application/json"))] = None + diff --git a/src/dub/models/operations/updateworkspace.py b/src/dub/models/operations/updateworkspace.py index 7f34e02..676b78c 100644 --- a/src/dub/models/operations/updateworkspace.py +++ b/src/dub/models/operations/updateworkspace.py @@ -1,12 +1,31 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" from __future__ import annotations -import dataclasses +from dub.types import BaseModel +from dub.utils import FieldMetadata, PathParamMetadata, RequestMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class UpdateWorkspaceRequest: - id_or_slug: str = dataclasses.field(metadata={'path_param': { 'field_name': 'idOrSlug', 'style': 'simple', 'explode': False }}) - r"""The ID or slug of the workspace.""" +class UpdateWorkspaceRequestBodyTypedDict(TypedDict): + name: NotRequired[str] + slug: NotRequired[str] +class UpdateWorkspaceRequestBody(BaseModel): + name: Optional[str] = None + slug: Optional[str] = None + + +class UpdateWorkspaceRequestTypedDict(TypedDict): + id_or_slug: str + r"""The ID or slug of the workspace to update.""" + request_body: NotRequired[UpdateWorkspaceRequestBodyTypedDict] + + +class UpdateWorkspaceRequest(BaseModel): + id_or_slug: Annotated[str, pydantic.Field(alias="idOrSlug"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""The ID or slug of the workspace to update.""" + request_body: Annotated[Optional[UpdateWorkspaceRequestBody], FieldMetadata(request=RequestMetadata(media_type="application/json"))] = None + diff --git a/src/dub/models/operations/upsertlink.py b/src/dub/models/operations/upsertlink.py index e402a09..a4557e8 100644 --- a/src/dub/models/operations/upsertlink.py +++ b/src/dub/models/operations/upsertlink.py @@ -1,79 +1,162 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" from __future__ import annotations -import dataclasses -from ...models.components import linkgeotargeting as components_linkgeotargeting -from dataclasses_json import Undefined, dataclass_json -from dub import utils -from typing import List, Optional, Union +from dub.models.components import linkgeotargeting as components_linkgeotargeting +from dub.types import BaseModel, Nullable +from dub.utils import FieldMetadata, QueryParamMetadata +import pydantic +from pydantic import model_serializer +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired -@dataclasses.dataclass -class UpsertLinkGlobals: - workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceId', 'style': 'form', 'explode': True }}) - r"""Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible.""" +class UpsertLinkGlobalsTypedDict(TypedDict): + workspace_id: NotRequired[str] +class UpsertLinkGlobals(BaseModel): + workspace_id: Annotated[Optional[str], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="workspaceId"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UpsertLinkRequestBody: - UNSET='__SPEAKEASY_UNSET__' - url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) +class UpsertLinkRequestBodyTypedDict(TypedDict): + url: str r"""The destination URL of the short link.""" - domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + domain: NotRequired[str] r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" - key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key'), 'exclude': lambda f: f is None }}) + key: NotRequired[str] r"""The short link slug. If not provided, a random 7-character slug will be generated.""" - external_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('externalId'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + external_id: NotRequired[Nullable[str]] r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" - prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) + prefix: NotRequired[str] r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). 
Will be ignored if `key` is provided.""" - track_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trackConversion'), 'exclude': lambda f: f is None }}) + track_conversion: NotRequired[bool] r"""Whether to track conversions for the short link.""" - archived: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('archived'), 'exclude': lambda f: f is None }}) + archived: NotRequired[bool] r"""Whether the short link is archived.""" - public_stats: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicStats'), 'exclude': lambda f: f is None }}) + public_stats: NotRequired[bool] r"""Whether the short link's stats are publicly accessible.""" - tag_id: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) - r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead. + tag_id: NotRequired[Nullable[str]] + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: NotRequired[UpsertLinkTagIdsTypedDict] + r"""The unique IDs of the tags assigned to the short link.""" + tag_names: NotRequired[UpsertLinkTagNamesTypedDict] + r"""The unique name of the tags assigned to the short link (case insensitive).""" + comments: NotRequired[Nullable[str]] + r"""The comments for the short link.""" + expires_at: NotRequired[Nullable[str]] + r"""The date and time when the short link will expire at.""" + expired_url: NotRequired[Nullable[str]] + r"""The URL to redirect to when the short link has expired.""" + password: NotRequired[Nullable[str]] + r"""The password required to access the destination URL of the short link.""" + proxy: NotRequired[bool] + r"""Whether the short link uses Custom Social Media Cards feature.""" + title: NotRequired[Nullable[str]] + r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + description: NotRequired[Nullable[str]] + r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + image: NotRequired[Nullable[str]] + r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" + rewrite: NotRequired[bool] + r"""Whether the short link uses link cloaking.""" + ios: NotRequired[Nullable[str]] + r"""The iOS destination URL for the short link for iOS device targeting.""" + android: NotRequired[Nullable[str]] + r"""The Android destination URL for the short link for Android device targeting.""" + geo: NotRequired[Nullable[components_linkgeotargeting.LinkGeoTargetingTypedDict]] + r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" + do_index: NotRequired[bool] + r"""Allow search engines to index your short link. Defaults to `false` if not provided. Learn more: https://d.to/noindex""" + - Deprecated field: This will be removed in a future release, please migrate away from it as soon as possible. 
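# --- editorial aside, not part of the generated patch -----------------------
# A minimal sketch (assumption: the pydantic v2 models defined in this hunk)
# of how the new Nullable/Optional split behaves when an upsert body is
# serialized. The URL and external id values below are placeholders.
from dub.models import operations

body = operations.UpsertLinkRequestBody(
    url="https://example.com",   # required field
    external_id=None,            # nullable field, explicitly set to None
)
payload = body.model_dump(by_alias=True)
# The wrap-mode serialize_model() keeps "externalId" as an explicit null
# because the field was set, while untouched nullable fields such as
# "comments" are omitted from the payload entirely.
# --- end editorial aside -----------------------------------------------------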
- """ - tag_ids: Optional[UpsertLinkTagIds] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagIds'), 'exclude': lambda f: f is None }}) +class UpsertLinkRequestBody(BaseModel): + url: str + r"""The destination URL of the short link.""" + domain: Optional[str] = None + r"""The domain of the short link. If not provided, the primary domain for the workspace will be used (or `dub.sh` if the workspace has no domains).""" + key: Optional[str] = None + r"""The short link slug. If not provided, a random 7-character slug will be generated.""" + external_id: Annotated[Optional[Nullable[str]], pydantic.Field(alias="externalId")] = None + r"""This is the ID of the link in your database. If set, it can be used to identify the link in the future. Must be prefixed with `ext_` when passed as a query parameter.""" + prefix: Optional[str] = None + r"""The prefix of the short link slug for randomly-generated keys (e.g. if prefix is `/c/`, generated keys will be in the `/c/:key` format). Will be ignored if `key` is provided.""" + track_conversion: Annotated[Optional[bool], pydantic.Field(alias="trackConversion")] = False + r"""Whether to track conversions for the short link.""" + archived: Optional[bool] = False + r"""Whether the short link is archived.""" + public_stats: Annotated[Optional[bool], pydantic.Field(alias="publicStats")] = False + r"""Whether the short link's stats are publicly accessible.""" + tag_id: Annotated[Optional[Nullable[str]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="tagId")] = None + r"""The unique ID of the tag assigned to the short link. This field is deprecated – use `tagIds` instead.""" + tag_ids: Annotated[Optional[UpsertLinkTagIds], pydantic.Field(alias="tagIds")] = None r"""The unique IDs of the tags assigned to the short link.""" - tag_names: Optional[UpsertLinkTagNames] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagNames'), 'exclude': lambda f: f is None }}) + tag_names: Annotated[Optional[UpsertLinkTagNames], pydantic.Field(alias="tagNames")] = None r"""The unique name of the tags assigned to the short link (case insensitive).""" - comments: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comments'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + comments: Optional[Nullable[str]] = None r"""The comments for the short link.""" - expires_at: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiresAt'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + expires_at: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiresAt")] = None r"""The date and time when the short link will expire at.""" - expired_url: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expiredUrl'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + expired_url: Annotated[Optional[Nullable[str]], pydantic.Field(alias="expiredUrl")] = None r"""The URL to redirect to when the short link has expired.""" - password: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + password: Optional[Nullable[str]] = None 
r"""The password required to access the destination URL of the short link.""" - proxy: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('proxy'), 'exclude': lambda f: f is None }}) + proxy: Optional[bool] = False r"""Whether the short link uses Custom Social Media Cards feature.""" - title: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + title: Optional[Nullable[str]] = None r"""The title of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - description: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + description: Optional[Nullable[str]] = None r"""The description of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - image: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('image'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + image: Optional[Nullable[str]] = None r"""The image of the short link generated via `api.dub.co/metatags`. Will be used for Custom Social Media Cards if `proxy` is true.""" - rewrite: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rewrite'), 'exclude': lambda f: f is None }}) + rewrite: Optional[bool] = False r"""Whether the short link uses link cloaking.""" - ios: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ios'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + ios: Optional[Nullable[str]] = None r"""The iOS destination URL for the short link for iOS device targeting.""" - android: Optional[str] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('android'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + android: Optional[Nullable[str]] = None r"""The Android destination URL for the short link for Android device targeting.""" - geo: Optional[components_linkgeotargeting.LinkGeoTargeting] = dataclasses.field(default=UNSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo'), 'exclude': lambda f: f is UpsertLinkRequestBody.UNSET }}) + geo: Optional[Nullable[components_linkgeotargeting.LinkGeoTargeting]] = None r"""Geo targeting information for the short link in JSON format `{[COUNTRY]: https://example.com }`.""" - do_index: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('doIndex'), 'exclude': lambda f: f is None }}) + do_index: Annotated[Optional[bool], pydantic.Field(alias="doIndex")] = False r"""Allow search engines to index your short link. Defaults to `false` if not provided. 
Learn more: https://d.to/noindex""" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["domain", "key", "externalId", "prefix", "trackConversion", "archived", "publicStats", "tagId", "tagIds", "tagNames", "comments", "expiresAt", "expiredUrl", "password", "proxy", "title", "description", "image", "rewrite", "ios", "android", "geo", "doIndex"] + nullable_fields = ["externalId", "tagId", "comments", "expiresAt", "expiredUrl", "password", "title", "description", "image", "ios", "android", "geo"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in self.model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val is not None: + m[k] = val + elif not k in optional_fields or ( + k in optional_fields + and k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member + ): + m[k] = val + + return m + + +UpsertLinkTagIdsTypedDict = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" UpsertLinkTagIds = Union[str, List[str]] +r"""The unique IDs of the tags assigned to the short link.""" + + +UpsertLinkTagNamesTypedDict = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + UpsertLinkTagNames = Union[str, List[str]] +r"""The unique name of the tags assigned to the short link (case insensitive).""" + diff --git a/src/dub/qr_codes.py b/src/dub/qr_codes.py index 6fe6c01..ad0e5ee 100644 --- a/src/dub/qr_codes.py +++ b/src/dub/qr_codes.py @@ -1,142 +1,165 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import errors, operations +from dub.types import BaseModel +import dub.utils as utils +from typing import Optional, Union -class QRCodes: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class QRCodes(BaseSDK): - def get(self, request: operations.GetQRCodeRequest) -> str: + def get( + self, *, + request: Union[operations.GetQRCodeRequest, operations.GetQRCodeRequestTypedDict], + server_url: Optional[str] = None, + ) -> str: r"""Retrieve a QR code + Retrieve a QR code for a link. + + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getQRCode', oauth2_scopes=[], security_source=self.sdk_configuration.security) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetQRCodeRequest) - url = base_url + '/qr' + req = self.build_request( + method="GET", + path="/qr", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="image/png", + security=self.sdk_configuration.security, + ) - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getQRCode", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) - query_params = { **utils.get_query_params(request), **query_params } - headers['Accept'] = 'image/png' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + if utils.match_response(http_res, "200", "image/png"): + return http_res.text + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + 
if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def get_async( + self, *, + request: Union[operations.GetQRCodeRequest, operations.GetQRCodeRequestTypedDict], + server_url: Optional[str] = None, + ) -> str: + r"""Retrieve a QR code - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - + Retrieve a QR code for a link. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetQRCodeRequest) + req = self.build_request( + method="GET", + path="/qr", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="image/png", + security=self.sdk_configuration.security, + ) - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'image/png'): - return http_res.text - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = 
http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) - + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getQRCode", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "image/png"): + return http_res.text + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = 
utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/sdk.py b/src/dub/sdk.py index 55f8fa5..7c29b23 100644 --- a/src/dub/sdk.py +++ b/src/dub/sdk.py @@ -1,22 +1,25 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http -from .analytics import Analytics -from .domains import Domains -from .links import Links -from .metatags import Metatags -from .qr_codes import QRCodes +from .basesdk import BaseSDK +from .httpclient import AsyncHttpClient, HttpClient from .sdkconfiguration import SDKConfiguration -from .tags import Tags -from .track import Track from .utils.retries import RetryConfig -from .workspaces import Workspaces -from dub import utils from dub._hooks import SDKHooks +from dub.analytics import Analytics +from dub.domains import Domains +from dub.links import Links +from dub.metatags import Metatags from dub.models import components, internal +from dub.qr_codes import QRCodes +from dub.tags import Tags +from dub.track import Track +from dub.types import Nullable, UNSET +import dub.utils as utils +from dub.workspaces import Workspaces +import httpx from typing import Callable, Dict, Optional, Union -class Dub: +class Dub(BaseSDK): r"""Dub.co API: Dub is link management infrastructure for companies to create marketing campaigns, link sharing features, and referral programs.""" links: Links qr_codes: QRCodes @@ -26,44 +29,47 @@ class Dub: domains: Domains track: Track metatags: Metatags - - sdk_configuration: SDKConfiguration - - def __init__(self, - token: Union[Optional[str], Callable[[], Optional[str]]] = None, - workspace_id: str = None, - project_slug: str = None, - server_idx: Optional[int] = None, - server_url: Optional[str] = None, - url_params: Optional[Dict[str, str]] = None, - client: Optional[requests_http.Session] = None, - retry_config: Optional[RetryConfig] = None - ) -> None: - """Instantiates the SDK configuring it with the provided parameters. 
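# --- editorial aside, not part of the generated patch -----------------------
# A usage sketch for the reworked SDK constructor added below. It relies only
# on the parameters visible in this hunk; the token, workspace id, and timeout
# values are placeholders, and the `from dub import Dub` import is assumed to
# match the package's public export.
import httpx
from dub import Dub

d = Dub(
    token="DUB_API_KEY_PLACEHOLDER",       # bearer token for authentication
    workspace_id="ws_placeholder",         # optional workspace id global
    client=httpx.Client(timeout=30.0),     # optional custom sync HTTP client
    async_client=httpx.AsyncClient(),      # optional custom async HTTP client
)
# --- end editorial aside -----------------------------------------------------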
+ def __init__( + self, + token: Optional[Union[Optional[str], Callable[[], Optional[str]]]] = None, + workspace_id: Optional[str] = None, + project_slug: Optional[str] = None, + server_idx: Optional[int] = None, + server_url: Optional[str] = None, + url_params: Optional[Dict[str, str]] = None, + client: Optional[HttpClient] = None, + async_client: Optional[AsyncHttpClient] = None, + retry_config: Optional[Nullable[RetryConfig]] = UNSET + ) -> None: + r"""Instantiates the SDK configuring it with the provided parameters. :param token: The token required for authentication - :type token: Union[Optional[str], Callable[[], Optional[str]]] :param workspace_id: Configures the workspace_id parameter for all supported operations - :type workspace_id: str :param project_slug: Configures the project_slug parameter for all supported operations - :type project_slug: str - :param server_idx: The index of the server to use for all operations - :type server_idx: int - :param server_url: The server URL to use for all operations - :type server_url: str + :param server_idx: The index of the server to use for all methods + :param server_url: The server URL to use for all methods :param url_params: Parameters to optionally template the server URL with - :type url_params: Dict[str, str] - :param client: The requests.Session HTTP client to use for all operations - :type client: requests_http.Session - :param retry_config: The utils.RetryConfig to use globally - :type retry_config: RetryConfig + :param client: The HTTP client to use for all synchronous methods + :param async_client: The Async HTTP client to use for all asynchronous methods + :param retry_config: The retry configuration to use for all supported methods """ if client is None: - client = requests_http.Session() + client = httpx.Client() + + assert issubclass( + type(client), HttpClient + ), "The provided client must implement the HttpClient protocol." + + if async_client is None: + async_client = httpx.AsyncClient() + assert issubclass( + type(async_client), AsyncHttpClient + ), "The provided async_client must implement the AsyncHttpClient protocol." + + security = None if callable(token): - def security(): - return components.Security(token = token()) + security = lambda: components.Security(token = token()) # pylint: disable=unnecessary-lambda-assignment else: security = components.Security(token = token) @@ -76,14 +82,15 @@ def security(): project_slug=project_slug, ) - self.sdk_configuration = SDKConfiguration( - client, - _globals, - security, - server_url, - server_idx, + BaseSDK.__init__(self, SDKConfiguration( + client=client, + async_client=async_client, + globals=_globals, + security=security, + server_url=server_url, + server_idx=server_idx, retry_config=retry_config - ) + )) hooks = SDKHooks() @@ -93,7 +100,7 @@ def security(): self.sdk_configuration.server_url = server_url # pylint: disable=protected-access - self.sdk_configuration.__dict__['_hooks'] = hooks + self.sdk_configuration.__dict__["_hooks"] = hooks self._init_sdks() @@ -107,3 +114,4 @@ def _init_sdks(self): self.domains = Domains(self.sdk_configuration) self.track = Track(self.sdk_configuration) self.metatags = Metatags(self.sdk_configuration) + diff --git a/src/dub/sdkconfiguration.py b/src/dub/sdkconfiguration.py index d9a9c65..695f9dd 100644 --- a/src/dub/sdkconfiguration.py +++ b/src/dub/sdkconfiguration.py @@ -1,41 +1,42 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" -import requests as requests_http from ._hooks import SDKHooks -from .utils import utils -from .utils.retries import RetryConfig +from .httpclient import AsyncHttpClient, HttpClient +from .utils import RetryConfig, remove_suffix from dataclasses import dataclass from dub.models import components, internal +from dub.types import Nullable, UNSET from typing import Callable, Dict, Optional, Tuple, Union SERVERS = [ - 'https://api.dub.co', + "https://api.dub.co", # Production API ] """Contains the list of servers available to the SDK""" @dataclass class SDKConfiguration: - client: requests_http.Session + client: HttpClient + async_client: AsyncHttpClient globals: internal.Globals - security: Union[components.Security,Callable[[], components.Security]] = None - server_url: Optional[str] = '' + security: Optional[Union[components.Security,Callable[[], components.Security]]] = None + server_url: Optional[str] = "" server_idx: Optional[int] = 0 - language: str = 'python' - openapi_doc_version: str = '0.0.1' - sdk_version: str = '0.0.18' - gen_version: str = '2.359.6' - user_agent: str = 'speakeasy-sdk/python 0.0.18 2.359.6 0.0.1 dub' - retry_config: Optional[RetryConfig] = None + language: str = "python" + openapi_doc_version: str = "0.0.1" + sdk_version: str = "0.0.19" + gen_version: str = "2.359.6" + user_agent: str = "speakeasy-sdk/python 0.0.19 2.359.6 0.0.1 dub" + retry_config: Optional[Nullable[RetryConfig]] = UNSET def __post_init__(self): self._hooks = SDKHooks() def get_server_details(self) -> Tuple[str, Dict[str, str]]: - if self.server_url is not None and self.server_url != '': - return utils.remove_suffix(self.server_url, '/'), {} + if self.server_url is not None and self.server_url: + return remove_suffix(self.server_url, "/"), {} if self.server_idx is None: self.server_idx = 0 diff --git a/src/dub/tags.py b/src/dub/tags.py index 81211a1..d9adac6 100644 --- a/src/dub/tags.py +++ b/src/dub/tags.py @@ -1,419 +1,499 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import components, errors, operations -from typing import List, Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import List, Optional, Union -class Tags: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Tags(BaseSDK): - def list(self, request: operations.GetTagsRequest) -> List[components.TagSchema]: + def list( + self, *, + server_url: Optional[str] = None, + ) -> List[components.TagSchema]: r"""Retrieve a list of tags + Retrieve a list of tags for the authenticated workspace. 
+ + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getTags', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.GetTagsGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.GetTagsRequest( + ) + + req = self.build_request( + method="GET", + path="/tags", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetTagsGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getTags", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/tags', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.TagSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if 
utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def list_async( + self, *, + server_url: Optional[str] = None, + ) -> List[components.TagSchema]: + r"""Retrieve a list of tags - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[List[components.TagSchema]]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 
'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Retrieve a list of tags for the authenticated workspace. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.GetTagsRequest( + ) + + req = self.build_request( + method="GET", + path="/tags", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.GetTagsGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getTags", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[components.TagSchema]]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def create(self, request: Optional[operations.CreateTagRequestBody] = None) -> components.TagSchema: + def create( + self, *, + request: Optional[Union[operations.CreateTagRequestBody, operations.CreateTagRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.TagSchema: r"""Create a new tag + Create a new tag for the authenticated workspace. + + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='createTag', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.CreateTagGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.CreateTagRequestBody) + + req = self.build_request( + method="POST", + path="/tags", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.CreateTagGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.CreateTagRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="createTag", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/tags', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.CreateTagRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "201", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.TagSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if 
utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def create_async( + self, *, + request: Optional[Union[operations.CreateTagRequestBody, operations.CreateTagRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.TagSchema: + r"""Create a new tag - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 201: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.TagSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', 
http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Create a new tag for the authenticated workspace. + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.CreateTagRequestBody) + + req = self.build_request( + method="POST", + path="/tags", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.CreateTagGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.CreateTagRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="createTag", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "201", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.TagSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def update(self, request: operations.UpdateTagRequest) -> components.TagSchema: + def update( + self, *, + id: str, + request_body: Optional[Union[operations.UpdateTagRequestBody, operations.UpdateTagRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.TagSchema: 
r"""Update a tag + Update a tag in the workspace. + + :param id: The ID of the tag to update. + :param request_body: + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='updateTag', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.UpdateTagGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.UpdateTagRequest( + id=id, + request_body=utils.unmarshal(request_body, operations.UpdateTagRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + + req = self.build_request( + method="PATCH", + path="/tags/{id}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpdateTagGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateTagRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="updateTag", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/tags/{id}', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, operations.UpdateTagRequest, "request_body", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('PATCH', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.TagSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, 
errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def update_async( + self, *, + id: str, + request_body: Optional[Union[operations.UpdateTagRequestBody, operations.UpdateTagRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.TagSchema: + r"""Update a tag - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.TagSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if 
utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Update a tag in the workspace. + :param id: The ID of the tag to update. 
+ :param request_body: + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.UpdateTagRequest( + id=id, + request_body=utils.unmarshal(request_body, operations.UpdateTagRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + + req = self.build_request( + method="PATCH", + path="/tags/{id}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.UpdateTagGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateTagRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="updateTag", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.TagSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/track.py b/src/dub/track.py index 0ea9d06..aa1ca76 100644 --- a/src/dub/track.py +++ b/src/dub/track.py @@ -1,422 +1,497 @@ """Code generated by Speakeasy 
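Reviewer note: the regenerated Tags client replaces the single positional request object with keyword-only parameters that accept either a Pydantic model or a plain TypedDict-style dict, and error responses are now raised as typed exceptions built from a parsed `*Data` payload. Below is a minimal usage sketch of the new call style; the `Dub` constructor arguments, the tag field names, and the `.data` attribute on the raised error are illustrative assumptions, not taken from this patch.

    import dub
    from dub.models import errors

    # Placeholder initialization; consult the SDK README for the real options.
    d = dub.Dub(token="DUB_API_KEY", workspace_id="ws_placeholder")

    try:
        # Keyword-only style: a plain dict (TypedDict-compatible) is accepted
        # and unmarshalled into operations.CreateTagRequestBody by the SDK.
        tag = d.tags.create(request={"tag": "marketing"})

        # update() now takes the path parameter and the body separately.
        d.tags.update(id=tag.id, request_body={"name": "growth"})
    except errors.BadRequest as err:
        # With this change, error responses surface as typed exceptions that
        # carry the parsed payload (attribute name assumed to be `data`).
        print(err.data)

The same sync/async pairing (`create` / `create_async`, `update` / `update_async`) is applied to every resource regenerated in this patch.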
(https://speakeasyapi.dev). DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import errors, operations -from typing import Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import Optional, Union -class Track: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Track(BaseSDK): - def lead(self, request: Optional[operations.TrackLeadRequestBody] = None) -> operations.TrackLeadResponseBody: + def lead( + self, *, + request: Optional[Union[operations.TrackLeadRequestBody, operations.TrackLeadRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.TrackLeadResponseBody: r"""Track a lead + Track a lead for a short link. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='trackLead', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.TrackLeadGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.TrackLeadRequestBody) + + req = self.build_request( + method="POST", + path="/track/lead", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.TrackLeadGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.TrackLeadRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="trackLead", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/track/lead', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.TrackLeadRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req 
= self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.TrackLeadResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def lead_async( + self, *, + request: Optional[Union[operations.TrackLeadRequestBody, operations.TrackLeadRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.TrackLeadResponseBody: + r"""Track a lead - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.TrackLeadResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if 
utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = 
http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Track a lead for a short link. + :param request: The request object to send. + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.TrackLeadRequestBody) + + req = self.build_request( + method="POST", + path="/track/lead", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.TrackLeadGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.TrackLeadRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="trackLead", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.TrackLeadResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise 
errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def sale(self, request: Optional[operations.TrackSaleRequestBody] = None) -> operations.TrackSaleResponseBody: + def sale( + self, *, + request: Optional[Union[operations.TrackSaleRequestBody, operations.TrackSaleRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.TrackSaleResponseBody: r"""Track a sale + Track a sale for a short link. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='trackSale', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.TrackSaleGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.TrackSaleRequestBody) + + req = self.build_request( + method="POST", + path="/track/sale", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.TrackSaleGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.TrackSaleRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="trackSale", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/track/sale', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.TrackSaleRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return 
utils.unmarshal_json(http_res.text, Optional[operations.TrackSaleResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def sale_async( + self, *, + request: Optional[Union[operations.TrackSaleRequestBody, operations.TrackSaleRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.TrackSaleResponseBody: + r"""Track a sale - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.TrackSaleResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif 
http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', 
http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Track a sale for a short link. + :param request: The request object to send. + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.TrackSaleRequestBody) + + req = self.build_request( + method="POST", + path="/track/sale", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.TrackSaleGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.TrackSaleRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="trackSale", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.TrackSaleResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def customer(self, request: 
Optional[operations.TrackCustomerRequestBody] = None) -> operations.TrackCustomerResponseBody: + def customer( + self, *, + request: Optional[Union[operations.TrackCustomerRequestBody, operations.TrackCustomerRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.TrackCustomerResponseBody: r"""Track a customer + Track a customer for an authenticated workspace. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='trackCustomer', oauth2_scopes=[], security_source=self.sdk_configuration.security) - _globals = operations.TrackCustomerGlobals( - workspace_id=self.sdk_configuration.globals.workspace_id, + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.TrackCustomerRequestBody) + + req = self.build_request( + method="POST", + path="/track/customer", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.TrackCustomerGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.TrackCustomerRequestBody]), + ) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="trackCustomer", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], ) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) - - url = utils.generate_url(base_url, '/track/customer', request, _globals) - - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - - headers = { **utils.get_headers(request, _globals), **headers } - req_content_type, data, form = utils.serialize_request_body(request, Optional[operations.TrackCustomerRequestBody], "request", False, True, 'json') - if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - query_params = { **utils.get_query_params(request, _globals), **query_params } - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client - - try: - req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.TrackCustomerResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if 
utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def customer_async( + self, *, + request: Optional[Union[operations.TrackCustomerRequestBody, operations.TrackCustomerRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> operations.TrackCustomerResponseBody: + r"""Track a customer - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - - - - - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[operations.TrackCustomerResponseBody]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out 
- - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + Track a customer for an authenticated workspace. 
+ :param request: The request object to send. + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel) and request is not None: + request = utils.unmarshal(request, operations.TrackCustomerRequestBody) + + req = self.build_request( + method="POST", + path="/track/customer", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + _globals=operations.TrackCustomerGlobals( + workspace_id=self.sdk_configuration.globals.workspace_id, + ), + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, True, "json", Optional[operations.TrackCustomerRequestBody]), + ) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="trackCustomer", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[operations.TrackCustomerResponseBody]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - diff --git a/src/dub/types/__init__.py b/src/dub/types/__init__.py new file mode 100644 index 0000000..f2314e9 --- /dev/null +++ b/src/dub/types/__init__.py @@ -0,0 +1,9 @@ +"""Code generated by Speakeasy 
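The regenerated handlers above swap the long if/elif status-code ladders for utils.match_response plus typed error classes that wrap the unmarshalled body as `data`. A minimal caller sketch; the Dub client class, its token argument, the `track` attribute and the `customer_id` field are assumptions not shown in this hunk:

import asyncio

from dub import Dub  # assumed entry point for the generated SDK
from dub.models import errors, operations

async def main():
    d = Dub(token="DUB_API_KEY")  # constructor shape is an assumption
    try:
        res = await d.track.customer_async(
            request=operations.TrackCustomerRequestBody(customer_id="cus_123"),  # illustrative field name
        )
        print(res)
    except errors.RateLimitExceeded as e:
        print("rate limited:", e.data)  # typed errors carry the unmarshalled payload on `data`
    except errors.SDKError as e:
        print("API error:", e)

asyncio.run(main())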
(https://speakeasyapi.dev). DO NOT EDIT.""" + +from .basemodel import UNSET, Nullable, BaseModel + +__all__ = [ + "UNSET", + "Nullable", + "BaseModel", +] diff --git a/src/dub/types/basemodel.py b/src/dub/types/basemodel.py new file mode 100644 index 0000000..969e85a --- /dev/null +++ b/src/dub/types/basemodel.py @@ -0,0 +1,18 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from pydantic import ConfigDict +from pydantic import BaseModel as PydanticBaseModel +from typing import TypeVar, Union +from typing_extensions import TypeAliasType + + +class UNSET: + pass + + +T = TypeVar("T") +Nullable = TypeAliasType("Nullable", Union[T, None], type_params=(T,)) + + +class BaseModel(PydanticBaseModel): + model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True) diff --git a/src/dub/utils/__init__.py b/src/dub/utils/__init__.py index 94b7398..344116a 100644 --- a/src/dub/utils/__init__.py +++ b/src/dub/utils/__init__.py @@ -1,4 +1,73 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" -from .retries import * -from .utils import * +from .enums import OpenEnumMeta +from .headers import get_headers, get_response_headers +from .metadata import ( + FieldMetadata, + find_metadata, + FormMetadata, + HeaderMetadata, + MultipartFormMetadata, + PathParamMetadata, + QueryParamMetadata, + RequestMetadata, + SecurityMetadata, +) +from .queryparams import get_query_params +from .retries import BackoffStrategy, Retries, retry, retry_async, RetryConfig +from .requestbodies import serialize_request_body, SerializedRequestBody +from .security import get_security +from .serializers import ( + marshal_json, + unmarshal, + unmarshal_json, + serialize_decimal, + serialize_float, + serialize_int, + validate_decimal, + validate_float, + validate_int, + validate_open_enum, +) +from .url import generate_url, template_url, remove_suffix +from .values import match_content_type, match_status_codes, match_response + +__all__ = [ + "BackoffStrategy", + "FieldMetadata", + "find_metadata", + "FormMetadata", + "generate_url", + "get_headers", + "get_query_params", + "get_response_headers", + "get_security", + "HeaderMetadata", + "marshal_json", + "match_content_type", + "match_status_codes", + "match_response", + "MultipartFormMetadata", + "OpenEnumMeta", + "PathParamMetadata", + "QueryParamMetadata", + "remove_suffix", + "Retries", + "retry", + "retry_async", + "RetryConfig", + "RequestMetadata", + "SecurityMetadata", + "serialize_decimal", + "serialize_float", + "serialize_int", + "serialize_request_body", + "SerializedRequestBody", + "template_url", + "unmarshal", + "unmarshal_json", + "validate_decimal", + "validate_float", + "validate_int", + "validate_open_enum", +] diff --git a/src/dub/utils/enums.py b/src/dub/utils/enums.py new file mode 100644 index 0000000..2804f7d --- /dev/null +++ b/src/dub/utils/enums.py @@ -0,0 +1,34 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +import enum + + +class OpenEnumMeta(enum.EnumMeta): + def __call__( + cls, value, names=None, *, module=None, qualname=None, type=None, start=1 + ): + # The `type` kwarg also happens to be a built-in that pylint flags as + # redeclared. Safe to ignore this lint rule with this scope. 
+ # pylint: disable=redefined-builtin + + if names is not None: + return super().__call__( + value, + names=names, + module=module, + qualname=qualname, + type=type, + start=start, + ) + + try: + return super().__call__( + value, + names=names, # pyright: ignore[reportArgumentType] + module=module, + qualname=qualname, + type=type, + start=start, + ) + except ValueError: + return value diff --git a/src/dub/utils/eventstreaming.py b/src/dub/utils/eventstreaming.py new file mode 100644 index 0000000..d3470d8 --- /dev/null +++ b/src/dub/utils/eventstreaming.py @@ -0,0 +1,122 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +import re +import json +from typing import Callable, Iterator, TypeVar, Optional, Generator + +T = TypeVar("T") + + +class ServerEvent: + id: Optional[str] = None + event: Optional[str] = None + data: Optional[str] = None + retry: Optional[int] = None + + +MESSAGE_BOUNDARIES = [ + b"\r\n\r\n", + b"\n\n", + b"\r\r", +] + + +def stream_events( + stream: Iterator[bytes], decoder: Callable[[str], T] +) -> Generator[T, None, None]: + buffer = bytearray() + position = 0 + for chunk in stream: + buffer += chunk + for i in range(position, len(buffer)): + char = buffer[i : i + 1] + seq: Optional[bytes] = None + if char in [b"\r", b"\n"]: + for boundary in MESSAGE_BOUNDARIES: + seq = _peek_sequence(i, buffer, boundary) + if seq is not None: + break + if seq is None: + continue + + block = buffer[position:i] + position = i + len(seq) + event = _parse_event(block, decoder) + if event is not None: + yield event + + if position > 0: + buffer = buffer[position:] + position = 0 + + event = _parse_event(buffer, decoder) + if event is not None: + yield event + + +def _parse_event(raw: bytearray, decoder: Callable[[str], T]): + block = raw.decode() + lines = re.split(r"\r?\n|\r", block) + publish = False + event = ServerEvent() + data = "" + for line in lines: + if not line: + continue + + delim = line.find(":") + if delim <= 0: + continue + + field = line[0:delim] + value = line[delim + 1 :] if delim < len(line) - 1 else "" + if len(value) and value[0] == " ": + value = value[1:] + + if field == "event": + event.event = value + publish = True + elif field == "data": + data += value + "\n" + publish = True + elif field == "id": + event.id = value + publish = True + elif field == "retry": + event.retry = int(value) if value.isdigit() else None + publish = True + + if data: + data = data[:-1] + event.data = data + + if ( + data.isnumeric() + or data == "true" + or data == "false" + or data == "null" + or data.startswith("{") + or data.startswith("[") + or data.startswith('"') + ): + try: + event.data = json.loads(data) + except Exception: + pass + + out = None + if publish: + out = decoder(json.dumps(event.__dict__)) + + return out + + +def _peek_sequence(position: int, buffer: bytearray, sequence: bytes): + if len(sequence) > (len(buffer) - position): + return None + + for i, seq in enumerate(sequence): + if buffer[position + i] != seq: + return None + + return sequence diff --git a/src/dub/utils/forms.py b/src/dub/utils/forms.py new file mode 100644 index 0000000..4df2b8d --- /dev/null +++ b/src/dub/utils/forms.py @@ -0,0 +1,207 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
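OpenEnumMeta (utils/enums.py above) makes generated enums tolerant of spec drift: a value lookup that would normally raise ValueError falls through and hands back the raw value. A small sketch with a hypothetical enum; the generated enums themselves live under dub.models:

import enum

from dub.utils import OpenEnumMeta

class Plan(str, enum.Enum, metaclass=OpenEnumMeta):  # hypothetical enum for illustration
    FREE = "free"
    PRO = "pro"

print(Plan("pro"))         # resolves to the Plan.PRO member as usual
print(Plan("enterprise"))  # unknown value is returned as-is instead of raising ValueError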
DO NOT EDIT.""" + +from typing import ( + Any, + Dict, + get_type_hints, + List, + Tuple, +) +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .serializers import marshal_json + +from .metadata import ( + FormMetadata, + MultipartFormMetadata, + find_field_metadata, +) +from .values import _val_to_string + + +def _populate_form( + field_name: str, + explode: bool, + obj: Any, + delimiter: str, + form: Dict[str, List[str]], +): + if obj is None: + return form + + if isinstance(obj, BaseModel): + items = [] + + obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields + for name in obj_fields: + obj_field = obj_fields[name] + obj_field_name = obj_field.alias if obj_field.alias is not None else name + if obj_field_name == "": + continue + + val = getattr(obj, name) + if val is None: + continue + + if explode: + form[obj_field_name] = [_val_to_string(val)] + else: + items.append(f"{obj_field_name}{delimiter}{_val_to_string(val)}") + + if len(items) > 0: + form[field_name] = [delimiter.join(items)] + elif isinstance(obj, Dict): + items = [] + for key, value in obj.items(): + if value is None: + continue + + if explode: + form[key] = [_val_to_string(value)] + else: + items.append(f"{key}{delimiter}{_val_to_string(value)}") + + if len(items) > 0: + form[field_name] = [delimiter.join(items)] + elif isinstance(obj, List): + items = [] + + for value in obj: + if value is None: + continue + + if explode: + if not field_name in form: + form[field_name] = [] + form[field_name].append(_val_to_string(value)) + else: + items.append(_val_to_string(value)) + + if len(items) > 0: + form[field_name] = [delimiter.join([str(item) for item in items])] + else: + form[field_name] = [_val_to_string(obj)] + + return form + + +def serialize_multipart_form( + media_type: str, request: Any +) -> Tuple[str, Dict[str, Any], Dict[str, Any]]: + form: Dict[str, Any] = {} + files: Dict[str, Any] = {} + + if not isinstance(request, BaseModel): + raise TypeError("invalid request body type") + + request_fields: Dict[str, FieldInfo] = request.__class__.model_fields + request_field_types = get_type_hints(request.__class__) + + for name in request_fields: + field = request_fields[name] + + val = getattr(request, name) + if val is None: + continue + + field_metadata = find_field_metadata(field, MultipartFormMetadata) + if not field_metadata: + continue + + f_name = field.alias if field.alias is not None else name + + if field_metadata.file: + file_fields: Dict[str, FieldInfo] = val.__class__.model_fields + + file_name = "" + field_name = "" + content = None + content_type = None + + for file_field_name in file_fields: + file_field = file_fields[file_field_name] + + file_metadata = find_field_metadata(file_field, MultipartFormMetadata) + if file_metadata is None: + continue + + if file_metadata.content: + content = getattr(val, file_field_name, None) + elif file_field_name == "content_type": + content_type = getattr(val, file_field_name, None) + else: + field_name = ( + file_field.alias + if file_field.alias is not None + else file_field_name + ) + file_name = getattr(val, file_field_name) + + if field_name == "" or file_name == "" or content is None: + raise ValueError("invalid multipart/form-data file") + + if content_type is not None: + files[field_name] = (file_name, content, content_type) + else: + files[field_name] = (file_name, content) + elif field_metadata.json: + files[f_name] = ( + None, + marshal_json(val, request_field_types[name]), + "application/json", + ) + else: + if isinstance(val, 
List): + values = [] + + for value in val: + if value is None: + continue + values.append(_val_to_string(value)) + + form[f_name + "[]"] = values + else: + form[f_name] = _val_to_string(val) + return media_type, form, files + + +def serialize_form_data(data: Any) -> Dict[str, Any]: + form: Dict[str, List[str]] = {} + + if isinstance(data, BaseModel): + data_fields: Dict[str, FieldInfo] = data.__class__.model_fields + data_field_types = get_type_hints(data.__class__) + for name in data_fields: + field = data_fields[name] + + val = getattr(data, name) + if val is None: + continue + + metadata = find_field_metadata(field, FormMetadata) + if metadata is None: + continue + + f_name = field.alias if field.alias is not None else name + + if metadata.json: + form[f_name] = [marshal_json(val, data_field_types[name])] + else: + if metadata.style == "form": + _populate_form( + f_name, + metadata.explode, + val, + ",", + form, + ) + else: + raise ValueError(f"Invalid form style for field {name}") + elif isinstance(data, Dict): + for key, value in data.items(): + form[key] = [_val_to_string(value)] + else: + raise TypeError(f"Invalid request body type {type(data)} for form data") + + return form diff --git a/src/dub/utils/headers.py b/src/dub/utils/headers.py new file mode 100644 index 0000000..ad25879 --- /dev/null +++ b/src/dub/utils/headers.py @@ -0,0 +1,136 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from typing import ( + Any, + Dict, + List, + Optional, +) +from httpx import Headers +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + HeaderMetadata, + find_field_metadata, +) + +from .values import _populate_from_globals, _val_to_string + + +def get_headers(headers_params: Any, gbls: Optional[Any] = None) -> Dict[str, str]: + headers: Dict[str, str] = {} + + globals_already_populated = [] + if headers_params is not None: + globals_already_populated = _populate_headers(headers_params, gbls, headers, []) + if gbls is not None: + _populate_headers(gbls, None, headers, globals_already_populated) + + return headers + + +def _populate_headers( + headers_params: Any, + gbls: Any, + header_values: Dict[str, str], + skip_fields: List[str], +) -> List[str]: + globals_already_populated: List[str] = [] + + if not isinstance(headers_params, BaseModel): + return globals_already_populated + + param_fields: Dict[str, FieldInfo] = headers_params.__class__.model_fields + for name in param_fields: + if name in skip_fields: + continue + + field = param_fields[name] + f_name = field.alias if field.alias is not None else name + + metadata = find_field_metadata(field, HeaderMetadata) + if metadata is None: + continue + + value, global_found = _populate_from_globals( + name, getattr(headers_params, name), HeaderMetadata, gbls + ) + if global_found: + globals_already_populated.append(name) + value = _serialize_header(metadata.explode, value) + + if value != "": + header_values[f_name] = value + + return globals_already_populated + + +def _serialize_header(explode: bool, obj: Any) -> str: + if obj is None: + return "" + + if isinstance(obj, BaseModel): + items = [] + obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields + for name in obj_fields: + obj_field = obj_fields[name] + obj_param_metadata = find_field_metadata(obj_field, HeaderMetadata) + + if not obj_param_metadata: + continue + + f_name = obj_field.alias if obj_field.alias is not None else name + + val = getattr(obj, name) + if val is None: + continue + + if explode: + 
items.append(f"{f_name}={_val_to_string(val)}") + else: + items.append(f_name) + items.append(_val_to_string(val)) + + if len(items) > 0: + return ",".join(items) + elif isinstance(obj, Dict): + items = [] + + for key, value in obj.items(): + if value is None: + continue + + if explode: + items.append(f"{key}={_val_to_string(value)}") + else: + items.append(key) + items.append(_val_to_string(value)) + + if len(items) > 0: + return ",".join([str(item) for item in items]) + elif isinstance(obj, List): + items = [] + + for value in obj: + if value is None: + continue + + items.append(_val_to_string(value)) + + if len(items) > 0: + return ",".join(items) + else: + return f"{_val_to_string(obj)}" + + return "" + + +def get_response_headers(headers: Headers) -> Dict[str, List[str]]: + res = {} + for k, v in headers.items(): + if not k in res: + res[k] = [] + + res[k].append(v) + return res diff --git a/src/dub/utils/metadata.py b/src/dub/utils/metadata.py new file mode 100644 index 0000000..b5693b3 --- /dev/null +++ b/src/dub/utils/metadata.py @@ -0,0 +1,118 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from typing import Optional, Type, TypeVar, Union +from dataclasses import dataclass +from pydantic.fields import FieldInfo + + +T = TypeVar("T") + + +@dataclass +class SecurityMetadata: + option: bool = False + scheme: bool = False + scheme_type: Optional[str] = None + sub_type: Optional[str] = None + field_name: Optional[str] = None + + def get_field_name(self, default: str) -> str: + return self.field_name or default + + +@dataclass +class ParamMetadata: + serialization: Optional[str] = None + style: str = "simple" + explode: bool = False + + +@dataclass +class PathParamMetadata(ParamMetadata): + pass + + +@dataclass +class QueryParamMetadata(ParamMetadata): + style: str = "form" + explode: bool = True + + +@dataclass +class HeaderMetadata(ParamMetadata): + pass + + +@dataclass +class RequestMetadata: + media_type: str = "application/octet-stream" + + +@dataclass +class MultipartFormMetadata: + file: bool = False + content: bool = False + json: bool = False + + +@dataclass +class FormMetadata: + json: bool = False + style: str = "form" + explode: bool = True + + +class FieldMetadata: + security: Optional[SecurityMetadata] = None + path: Optional[PathParamMetadata] = None + query: Optional[QueryParamMetadata] = None + header: Optional[HeaderMetadata] = None + request: Optional[RequestMetadata] = None + form: Optional[FormMetadata] = None + multipart: Optional[MultipartFormMetadata] = None + + def __init__( + self, + security: Optional[SecurityMetadata] = None, + path: Optional[Union[PathParamMetadata, bool]] = None, + query: Optional[Union[QueryParamMetadata, bool]] = None, + header: Optional[Union[HeaderMetadata, bool]] = None, + request: Optional[Union[RequestMetadata, bool]] = None, + form: Optional[Union[FormMetadata, bool]] = None, + multipart: Optional[Union[MultipartFormMetadata, bool]] = None, + ): + self.security = security + self.path = PathParamMetadata() if isinstance(path, bool) else path + self.query = QueryParamMetadata() if isinstance(query, bool) else query + self.header = HeaderMetadata() if isinstance(header, bool) else header + self.request = RequestMetadata() if isinstance(request, bool) else request + self.form = FormMetadata() if isinstance(form, bool) else form + self.multipart = ( + MultipartFormMetadata() if isinstance(multipart, bool) else multipart + ) + + +def find_field_metadata(field_info: FieldInfo, metadata_type: Type[T]) -> 
Optional[T]: + metadata = find_metadata(field_info, FieldMetadata) + if not metadata: + return None + + fields = metadata.__dict__ + + for field in fields: + if isinstance(fields[field], metadata_type): + return fields[field] + + return None + + +def find_metadata(field_info: FieldInfo, metadata_type: Type[T]) -> Optional[T]: + metadata = field_info.metadata + if not metadata: + return None + + for md in metadata: + if isinstance(md, metadata_type): + return md + + return None diff --git a/src/dub/utils/queryparams.py b/src/dub/utils/queryparams.py new file mode 100644 index 0000000..5f5ca8d --- /dev/null +++ b/src/dub/utils/queryparams.py @@ -0,0 +1,162 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from typing import ( + Any, + Dict, + get_type_hints, + List, + Optional, +) + +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + QueryParamMetadata, + find_field_metadata, +) +from .values import _get_serialized_params, _populate_from_globals, _val_to_string +from .forms import _populate_form + + +def get_query_params( + query_params: Any, + gbls: Optional[Any] = None, +) -> Dict[str, List[str]]: + params: Dict[str, List[str]] = {} + + globals_already_populated = _populate_query_params(query_params, gbls, params, []) + if gbls is not None: + _populate_query_params(gbls, None, params, globals_already_populated) + + return params + + +def _populate_query_params( + query_params: Any, + gbls: Any, + query_param_values: Dict[str, List[str]], + skip_fields: List[str], +) -> List[str]: + globals_already_populated: List[str] = [] + + if not isinstance(query_params, BaseModel): + return globals_already_populated + + param_fields: Dict[str, FieldInfo] = query_params.__class__.model_fields + param_field_types = get_type_hints(query_params.__class__) + for name in param_fields: + if name in skip_fields: + continue + + field = param_fields[name] + + metadata = find_field_metadata(field, QueryParamMetadata) + if not metadata: + continue + + value = getattr(query_params, name) if query_params is not None else None + + value, global_found = _populate_from_globals( + name, value, QueryParamMetadata, gbls + ) + if global_found: + globals_already_populated.append(name) + + f_name = field.alias if field.alias is not None else name + serialization = metadata.serialization + if serialization is not None: + serialized_parms = _get_serialized_params( + metadata, f_name, value, param_field_types[name] + ) + for key, value in serialized_parms.items(): + if key in query_param_values: + query_param_values[key].extend(value) + else: + query_param_values[key] = [value] + else: + style = metadata.style + if style == "deepObject": + _populate_deep_object_query_params(f_name, value, query_param_values) + elif style == "form": + _populate_delimited_query_params( + metadata, f_name, value, ",", query_param_values + ) + elif style == "pipeDelimited": + _populate_delimited_query_params( + metadata, f_name, value, "|", query_param_values + ) + else: + raise NotImplementedError( + f"query param style {style} not yet supported" + ) + + return globals_already_populated + + +def _populate_deep_object_query_params( + field_name: str, + obj: Any, + params: Dict[str, List[str]], +): + if obj is None: + return + + if isinstance(obj, BaseModel): + obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields + for name in obj_fields: + obj_field = obj_fields[name] + + f_name = obj_field.alias if obj_field.alias is not None else name + + obj_param_metadata 
= find_field_metadata(obj_field, QueryParamMetadata) + if obj_param_metadata is None: + continue + + obj_val = getattr(obj, name) + if obj_val is None: + continue + + if isinstance(obj_val, List): + for val in obj_val: + if val is None: + continue + + if params.get(f"{field_name}[{f_name}]") is None: + params[f"{field_name}[{f_name}]"] = [] + + params[f"{field_name}[{f_name}]"].append(_val_to_string(val)) + else: + params[f"{field_name}[{f_name}]"] = [_val_to_string(obj_val)] + elif isinstance(obj, Dict): + for key, value in obj.items(): + if value is None: + continue + + if isinstance(value, List): + for val in value: + if val is None: + continue + + if params.get(f"{field_name}[{key}]") is None: + params[f"{field_name}[{key}]"] = [] + + params[f"{field_name}[{key}]"].append(_val_to_string(val)) + else: + params[f"{field_name}[{key}]"] = [_val_to_string(value)] + + +def _populate_delimited_query_params( + metadata: QueryParamMetadata, + field_name: str, + obj: Any, + delimiter: str, + query_param_values: Dict[str, List[str]], +): + _populate_form( + field_name, + metadata.explode, + obj, + delimiter, + query_param_values, + ) diff --git a/src/dub/utils/requestbodies.py b/src/dub/utils/requestbodies.py new file mode 100644 index 0000000..c01e275 --- /dev/null +++ b/src/dub/utils/requestbodies.py @@ -0,0 +1,66 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +import io +from dataclasses import dataclass +import re +from typing import ( + Any, + Optional, +) + +from .forms import serialize_form_data, serialize_multipart_form + +from .serializers import marshal_json + +SERIALIZATION_METHOD_TO_CONTENT_TYPE = { + "json": "application/json", + "form": "application/x-www-form-urlencoded", + "multipart": "multipart/form-data", + "raw": "application/octet-stream", + "string": "text/plain", +} + + +@dataclass +class SerializedRequestBody: + media_type: str + content: Optional[Any] = None + data: Optional[Any] = None + files: Optional[Any] = None + + +def serialize_request_body( + request_body: Any, + nullable: bool, + optional: bool, + serialization_method: str, + request_body_type, +) -> Optional[SerializedRequestBody]: + if request_body is None: + if not nullable and optional: + return None + + media_type = SERIALIZATION_METHOD_TO_CONTENT_TYPE[serialization_method] + + serialized_request_body = SerializedRequestBody(media_type) + + if re.match(r"(application|text)\/.*?\+*json.*", media_type) is not None: + serialized_request_body.content = marshal_json(request_body, request_body_type) + elif re.match(r"multipart\/.*", media_type) is not None: + ( + serialized_request_body.media_type, + serialized_request_body.data, + serialized_request_body.files, + ) = serialize_multipart_form(media_type, request_body) + elif re.match(r"application\/x-www-form-urlencoded.*", media_type) is not None: + serialized_request_body.data = serialize_form_data(request_body) + elif isinstance(request_body, (bytes, bytearray, io.BytesIO, io.BufferedReader)): + serialized_request_body.content = request_body + elif isinstance(request_body, str): + serialized_request_body.content = request_body + else: + raise TypeError( + f"invalid request body type {type(request_body)} for mediaType {media_type}" + ) + + return serialized_request_body diff --git a/src/dub/utils/retries.py b/src/dub/utils/retries.py index c40fc41..2540494 100644 --- a/src/dub/utils/retries.py +++ b/src/dub/utils/retries.py @@ -4,7 +4,7 @@ import time from typing import List -import requests +import httpx class BackoffStrategy: 
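get_query_params (utils/queryparams.py above) reads the FieldMetadata annotations on a pydantic model and serializes each field according to its QueryParamMetadata style (form, deepObject or pipeDelimited). A minimal sketch with a hypothetical parameters model; the generated operation models carry these annotations already:

from typing import List, Optional

from pydantic import BaseModel
from typing_extensions import Annotated

from dub.utils import FieldMetadata, QueryParamMetadata, get_query_params

class GetLinksParams(BaseModel):  # hypothetical model, illustrative field names
    domain: Annotated[Optional[str], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None
    tag_ids: Annotated[Optional[List[str]], FieldMetadata(query=True)] = None  # query=True takes the form/explode defaults

print(get_query_params(GetLinksParams(domain="dub.sh", tag_ids=["seo", "blog"])))
# expected: {'domain': ['dub.sh'], 'tag_ids': ['seo', 'blog']}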
@@ -13,7 +13,13 @@ class BackoffStrategy: exponent: float max_elapsed_time: int - def __init__(self, initial_interval: int, max_interval: int, exponent: float, max_elapsed_time: int): + def __init__( + self, + initial_interval: int, + max_interval: int, + exponent: float, + max_elapsed_time: int, + ): self.initial_interval = initial_interval self.max_interval = max_interval self.exponent = exponent @@ -25,7 +31,9 @@ class RetryConfig: backoff: BackoffStrategy retry_connection_errors: bool - def __init__(self, strategy: str, backoff: BackoffStrategy, retry_connection_errors: bool): + def __init__( + self, strategy: str, backoff: BackoffStrategy, retry_connection_errors: bool + ): self.strategy = strategy self.backoff = backoff self.retry_connection_errors = retry_connection_errors @@ -41,9 +49,9 @@ def __init__(self, config: RetryConfig, status_codes: List[str]): class TemporaryError(Exception): - response: requests.Response + response: httpx.Response - def __init__(self, response: requests.Response): + def __init__(self, response: httpx.Response): self.response = response @@ -55,9 +63,10 @@ def __init__(self, inner: Exception): def retry(func, retries: Retries): - if retries.config.strategy == 'backoff': + if retries.config.strategy == "backoff": + def do_request(): - res: requests.Response + res: httpx.Response try: res = func() @@ -74,12 +83,12 @@ def do_request(): if res.status_code == parsed_code: raise TemporaryError(res) - except requests.exceptions.ConnectionError as exception: + except httpx.ConnectError as exception: if retries.config.retry_connection_errors: raise raise PermanentError(exception) from exception - except requests.exceptions.Timeout as exception: + except httpx.TimeoutException as exception: if retries.config.retry_connection_errors: raise @@ -91,13 +100,74 @@ def do_request(): return res - return retry_with_backoff(do_request, retries.config.backoff.initial_interval, retries.config.backoff.max_interval, retries.config.backoff.exponent, retries.config.backoff.max_elapsed_time) + return retry_with_backoff( + do_request, + retries.config.backoff.initial_interval, + retries.config.backoff.max_interval, + retries.config.backoff.exponent, + retries.config.backoff.max_elapsed_time, + ) return func() -def retry_with_backoff(func, initial_interval=500, max_interval=60000, exponent=1.5, max_elapsed_time=3600000): - start = round(time.time()*1000) +async def retry_async(func, retries: Retries): + if retries.config.strategy == "backoff": + + async def do_request(): + res: httpx.Response + try: + res = await func() + + for code in retries.status_codes: + if "X" in code.upper(): + code_range = int(code[0]) + + status_major = res.status_code / 100 + + if status_major >= code_range and status_major < code_range + 1: + raise TemporaryError(res) + else: + parsed_code = int(code) + + if res.status_code == parsed_code: + raise TemporaryError(res) + except httpx.ConnectError as exception: + if retries.config.retry_connection_errors: + raise + + raise PermanentError(exception) from exception + except httpx.TimeoutException as exception: + if retries.config.retry_connection_errors: + raise + + raise PermanentError(exception) from exception + except TemporaryError: + raise + except Exception as exception: + raise PermanentError(exception) from exception + + return res + + return await retry_with_backoff_async( + do_request, + retries.config.backoff.initial_interval, + retries.config.backoff.max_interval, + retries.config.backoff.exponent, + retries.config.backoff.max_elapsed_time, + ) + + 
return await func() + + +def retry_with_backoff( + func, + initial_interval=500, + max_interval=60000, + exponent=1.5, + max_elapsed_time=3600000, +): + start = round(time.time() * 1000) retries = 0 while True: @@ -106,14 +176,41 @@ def retry_with_backoff(func, initial_interval=500, max_interval=60000, exponent= except PermanentError as exception: raise exception.inner except Exception as exception: # pylint: disable=broad-exception-caught - now = round(time.time()*1000) + now = round(time.time() * 1000) + if now - start > max_elapsed_time: + if isinstance(exception, TemporaryError): + return exception.response + + raise + sleep = (initial_interval / 1000) * exponent**retries + random.uniform(0, 1) + sleep = min(sleep, max_interval / 1000) + time.sleep(sleep) + retries += 1 + + +async def retry_with_backoff_async( + func, + initial_interval=500, + max_interval=60000, + exponent=1.5, + max_elapsed_time=3600000, +): + start = round(time.time() * 1000) + retries = 0 + + while True: + try: + return await func() + except PermanentError as exception: + raise exception.inner + except Exception as exception: # pylint: disable=broad-exception-caught + now = round(time.time() * 1000) if now - start > max_elapsed_time: if isinstance(exception, TemporaryError): return exception.response raise - sleep = ((initial_interval/1000) * - exponent**retries + random.uniform(0, 1)) + sleep = (initial_interval / 1000) * exponent**retries + random.uniform(0, 1) sleep = min(sleep, max_interval / 1000) time.sleep(sleep) retries += 1 diff --git a/src/dub/utils/security.py b/src/dub/utils/security.py new file mode 100644 index 0000000..d25d654 --- /dev/null +++ b/src/dub/utils/security.py @@ -0,0 +1,166 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +import base64 +from typing import ( + Any, + Dict, + Tuple, +) +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + SecurityMetadata, + find_field_metadata, +) + + +def get_security(security: Any) -> Tuple[Dict[str, str], Dict[str, str]]: + headers: Dict[str, str] = {} + query_params: Dict[str, str] = {} + + if security is None: + return headers, query_params + + if not isinstance(security, BaseModel): + raise TypeError("security must be a pydantic model") + + sec_fields: Dict[str, FieldInfo] = security.__class__.model_fields + for name in sec_fields: + sec_field = sec_fields[name] + + value = getattr(security, name) + if value is None: + continue + + metadata = find_field_metadata(sec_field, SecurityMetadata) + if metadata is None: + continue + if metadata.option: + _parse_security_option(headers, query_params, value) + return headers, query_params + if metadata.scheme: + # Special case for basic auth which could be a flattened model + if metadata.sub_type == "basic" and not isinstance(value, BaseModel): + _parse_security_scheme(headers, query_params, metadata, name, security) + else: + _parse_security_scheme(headers, query_params, metadata, name, value) + + return headers, query_params + + +def _parse_security_option( + headers: Dict[str, str], query_params: Dict[str, str], option: Any +): + if not isinstance(option, BaseModel): + raise TypeError("security option must be a pydantic model") + + opt_fields: Dict[str, FieldInfo] = option.__class__.model_fields + for name in opt_fields: + opt_field = opt_fields[name] + + metadata = find_field_metadata(opt_field, SecurityMetadata) + if metadata is None or not metadata.scheme: + continue + _parse_security_scheme( + headers, query_params, 
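The reworked retries module targets httpx and adds async variants. A short sketch of driving retry_async directly with a backoff strategy; the request function and endpoint are placeholders:

import asyncio

import httpx

from dub.utils import BackoffStrategy, Retries, RetryConfig, retry_async

async def send() -> httpx.Response:
    async with httpx.AsyncClient() as client:
        return await client.get("https://api.dub.co/links")  # placeholder request

async def main():
    # initial_interval (ms), max_interval (ms), exponent, max_elapsed_time (ms)
    config = RetryConfig("backoff", BackoffStrategy(500, 60000, 1.5, 3600000), True)
    res = await retry_async(send, Retries(config, ["429", "5XX"]))
    print(res.status_code)

asyncio.run(main())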
metadata, name, getattr(option, name) + ) + + +def _parse_security_scheme( + headers: Dict[str, str], + query_params: Dict[str, str], + scheme_metadata: SecurityMetadata, + field_name: str, + scheme: Any, +): + scheme_type = scheme_metadata.scheme_type + sub_type = scheme_metadata.sub_type + + if isinstance(scheme, BaseModel): + if scheme_type == "http" and sub_type == "basic": + _parse_basic_auth_scheme(headers, scheme) + return + + scheme_fields: Dict[str, FieldInfo] = scheme.__class__.model_fields + for name in scheme_fields: + scheme_field = scheme_fields[name] + + metadata = find_field_metadata(scheme_field, SecurityMetadata) + if metadata is None or metadata.field_name is None: + continue + + value = getattr(scheme, name) + + _parse_security_scheme_value( + headers, query_params, scheme_metadata, metadata, name, value + ) + else: + _parse_security_scheme_value( + headers, query_params, scheme_metadata, scheme_metadata, field_name, scheme + ) + + +def _parse_security_scheme_value( + headers: Dict[str, str], + query_params: Dict[str, str], + scheme_metadata: SecurityMetadata, + security_metadata: SecurityMetadata, + field_name: str, + value: Any, +): + scheme_type = scheme_metadata.scheme_type + sub_type = scheme_metadata.sub_type + + header_name = security_metadata.get_field_name(field_name) + + if scheme_type == "apiKey": + if sub_type == "header": + headers[header_name] = value + elif sub_type == "query": + query_params[header_name] = value + else: + raise ValueError("sub type {sub_type} not supported") + elif scheme_type == "openIdConnect": + headers[header_name] = _apply_bearer(value) + elif scheme_type == "oauth2": + if sub_type != "client_credentials": + headers[header_name] = _apply_bearer(value) + elif scheme_type == "http": + if sub_type == "bearer": + headers[header_name] = _apply_bearer(value) + else: + raise ValueError("sub type {sub_type} not supported") + else: + raise ValueError("scheme type {scheme_type} not supported") + + +def _apply_bearer(token: str) -> str: + return token.lower().startswith("bearer ") and token or f"Bearer {token}" + + +def _parse_basic_auth_scheme(headers: Dict[str, str], scheme: Any): + username = "" + password = "" + + if not isinstance(scheme, BaseModel): + raise TypeError("basic auth scheme must be a pydantic model") + + scheme_fields: Dict[str, FieldInfo] = scheme.__class__.model_fields + for name in scheme_fields: + scheme_field = scheme_fields[name] + + metadata = find_field_metadata(scheme_field, SecurityMetadata) + if metadata is None or metadata.field_name is None: + continue + + field_name = metadata.field_name + value = getattr(scheme, name) + + if field_name == "username": + username = value + if field_name == "password": + password = value + + data = f"{username}:{password}".encode() + headers["Authorization"] = f"Basic {base64.b64encode(data).decode()}" diff --git a/src/dub/utils/serializers.py b/src/dub/utils/serializers.py new file mode 100644 index 0000000..15cca42 --- /dev/null +++ b/src/dub/utils/serializers.py @@ -0,0 +1,159 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
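get_security (utils/security.py above) now walks pydantic field metadata instead of dataclass fields and returns the headers and query parameters to attach to a request. A sketch with a stand-in bearer-token model; the generated Security model lives in dub.models.components:

from pydantic import BaseModel
from typing_extensions import Annotated

from dub.utils import FieldMetadata, SecurityMetadata, get_security

class Security(BaseModel):  # stand-in for the generated model
    token: Annotated[str, FieldMetadata(security=SecurityMetadata(
        scheme=True, scheme_type="http", sub_type="bearer", field_name="Authorization"))]

headers, query_params = get_security(Security(token="dub_api_key"))
print(headers)       # {'Authorization': 'Bearer dub_api_key'}
print(query_params)  # {}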
DO NOT EDIT.""" + +from decimal import Decimal +import json +from typing import Type, TypeVar, Union, get_args +from typing_extensions import get_origin +from pydantic import ConfigDict, create_model +from pydantic_core import from_json +from typing_inspect import is_optional_type + +from ..types.basemodel import Nullable + + +def serialize_decimal(as_str: bool): + def serialize(d): + if is_optional_type(type(d)) and d is None: + return None + + if not isinstance(d, Decimal): + raise ValueError("Expected Decimal object") + + return str(d) if as_str else float(d) + + return serialize + + +def validate_decimal(d): + if d is None: + return None + + if isinstance(d, Decimal): + return d + + if not isinstance(d, (str, int, float)): + raise ValueError("Expected string, int or float") + + return Decimal(str(d)) + + +def serialize_float(as_str: bool): + def serialize(f): + if is_optional_type(type(f)) and f is None: + return None + + if not isinstance(f, float): + raise ValueError("Expected float") + + return str(f) if as_str else f + + return serialize + + +def validate_float(f): + if f is None: + return None + + if isinstance(f, float): + return f + + if not isinstance(f, str): + raise ValueError("Expected string") + + return float(f) + + +def serialize_int(as_str: bool): + def serialize(b): + if is_optional_type(type(b)) and b is None: + return None + + if not isinstance(b, int): + raise ValueError("Expected int") + + return str(b) if as_str else b + + return serialize + + +def validate_int(b): + if b is None: + return None + + if isinstance(b, int): + return b + + if not isinstance(b, str): + raise ValueError("Expected string") + + return int(b) + + +def validate_open_enum(is_int: bool): + def validate(e): + if e is None: + return None + + if is_int: + if not isinstance(e, int): + raise ValueError("Expected int") + else: + if not isinstance(e, str): + raise ValueError("Expected string") + + return e + + return validate + + +T = TypeVar("T") + + +def unmarshal_json(raw, typ: Type[T]) -> T: + return unmarshal(from_json(raw), typ) + + +def unmarshal(val, typ: Type[T]) -> T: + unmarshaller = create_model( + "Unmarshaller", + body=(typ, ...), + __config__=ConfigDict(populate_by_name=True, arbitrary_types_allowed=True), + ) + + m = unmarshaller(body=val) + + return m.body # pyright: ignore[reportAttributeAccessIssue] + + +def marshal_json(val, typ): + if is_nullable(typ) and val is None: + return "null" + + marshaller = create_model( + "Marshaller", + body=(typ, ...), + __config__=ConfigDict(populate_by_name=True, arbitrary_types_allowed=True), + ) + + m = marshaller(body=val) + + d = m.model_dump(by_alias=True, mode="json", exclude_none=True) + + if len(d) == 0: + return "" + + return json.dumps(d[next(iter(d))], separators=(",", ":"), sort_keys=True) + + +def is_nullable(field): + if get_origin(field) is Nullable: + return True + + if not get_origin(field) is Union or type(None) not in get_args(field): + return False + + for arg in get_args(field): + if get_origin(arg) is Nullable: + return True + + return False diff --git a/src/dub/utils/url.py b/src/dub/utils/url.py new file mode 100644 index 0000000..481ae77 --- /dev/null +++ b/src/dub/utils/url.py @@ -0,0 +1,152 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
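marshal_json/unmarshal_json (utils/serializers.py above) wrap pydantic (de)serialization for any annotated type, and is_nullable understands the Nullable alias from dub.types. A small sketch with a hypothetical model:

from dub.types import BaseModel, Nullable
from dub.utils import marshal_json, unmarshal_json

class TagSketch(BaseModel):  # hypothetical model for illustration
    id: str
    color: Nullable[str]

print(marshal_json(TagSketch(id="tag_123", color=None), TagSketch))
# '{"id":"tag_123"}'  (exclude_none drops the null field)

tag = unmarshal_json('{"id": "tag_123", "color": "red"}', TagSketch)
print(tag.color)  # red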
DO NOT EDIT.""" + +from decimal import Decimal +from typing import ( + Any, + Dict, + get_type_hints, + List, + Optional, + Union, + get_args, + get_origin, +) +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + PathParamMetadata, + find_field_metadata, +) +from .values import _get_serialized_params, _populate_from_globals, _val_to_string + + +def generate_url( + server_url: str, + path: str, + path_params: Any, + gbls: Optional[Any] = None, +) -> str: + path_param_values: Dict[str, str] = {} + + globals_already_populated = _populate_path_params( + path_params, gbls, path_param_values, [] + ) + if gbls is not None: + _populate_path_params(gbls, None, path_param_values, globals_already_populated) + + for key, value in path_param_values.items(): + path = path.replace("{" + key + "}", value, 1) + + return remove_suffix(server_url, "/") + path + + +def _populate_path_params( + path_params: Any, + gbls: Any, + path_param_values: Dict[str, str], + skip_fields: List[str], +) -> List[str]: + globals_already_populated: List[str] = [] + + if not isinstance(path_params, BaseModel): + return globals_already_populated + + path_param_fields: Dict[str, FieldInfo] = path_params.__class__.model_fields + path_param_field_types = get_type_hints(path_params.__class__) + for name in path_param_fields: + if name in skip_fields: + continue + + field = path_param_fields[name] + + param_metadata = find_field_metadata(field, PathParamMetadata) + if param_metadata is None: + continue + + param = getattr(path_params, name) if path_params is not None else None + param, global_found = _populate_from_globals( + name, param, PathParamMetadata, gbls + ) + if global_found: + globals_already_populated.append(name) + + if param is None: + continue + + f_name = field.alias if field.alias is not None else name + serialization = param_metadata.serialization + if serialization is not None: + serialized_params = _get_serialized_params( + param_metadata, f_name, param, path_param_field_types[name] + ) + for key, value in serialized_params.items(): + path_param_values[key] = value + else: + if param_metadata.style == "simple": + if isinstance(param, List): + pp_vals: List[str] = [] + for pp_val in param: + if pp_val is None: + continue + pp_vals.append(_val_to_string(pp_val)) + path_param_values[f_name] = ",".join(pp_vals) + elif isinstance(param, Dict): + pp_vals: List[str] = [] + for pp_key in param: + if param[pp_key] is None: + continue + if param_metadata.explode: + pp_vals.append(f"{pp_key}={_val_to_string(param[pp_key])}") + else: + pp_vals.append(f"{pp_key},{_val_to_string(param[pp_key])}") + path_param_values[f_name] = ",".join(pp_vals) + elif not isinstance(param, (str, int, float, complex, bool, Decimal)): + pp_vals: List[str] = [] + param_fields: Dict[str, FieldInfo] = param.__class__.model_fields + for name in param_fields: + param_field = param_fields[name] + + param_value_metadata = find_field_metadata( + param_field, PathParamMetadata + ) + if param_value_metadata is None: + continue + + param_name = ( + param_field.alias if param_field.alias is not None else name + ) + + param_field_val = getattr(param, name) + if param_field_val is None: + continue + if param_metadata.explode: + pp_vals.append( + f"{param_name}={_val_to_string(param_field_val)}" + ) + else: + pp_vals.append( + f"{param_name},{_val_to_string(param_field_val)}" + ) + path_param_values[f_name] = ",".join(pp_vals) + else: + path_param_values[f_name] = _val_to_string(param) + + return 
globals_already_populated + + +def is_optional(field): + return get_origin(field) is Union and type(None) in get_args(field) + + +def template_url(url_with_params: str, params: Dict[str, str]) -> str: + for key, value in params.items(): + url_with_params = url_with_params.replace("{" + key + "}", value) + + return url_with_params + + +def remove_suffix(input_string, suffix): + if suffix and input_string.endswith(suffix): + return input_string[: -len(suffix)] + return input_string diff --git a/src/dub/utils/utils.py b/src/dub/utils/utils.py deleted file mode 100644 index 1467a90..0000000 --- a/src/dub/utils/utils.py +++ /dev/null @@ -1,1099 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -import base64 -import json -import re -import sys -from dataclasses import Field, fields, is_dataclass, make_dataclass -from datetime import date, datetime -from decimal import Decimal -from email.message import Message -from enum import Enum -from typing import ( - Any, - Callable, - Dict, - List, - Optional, - Tuple, - Union, - get_args, - get_origin, -) -from xmlrpc.client import boolean -from typing_inspect import is_optional_type -import dateutil.parser -from dataclasses_json import DataClassJsonMixin - - -def get_security(security: Any) -> Tuple[Dict[str, str], Dict[str, str]]: - headers: Dict[str, str] = {} - query_params: Dict[str, str] = {} - - if security is None: - return headers, query_params - - sec_fields: Tuple[Field, ...] = fields(security) - for sec_field in sec_fields: - value = getattr(security, sec_field.name) - if value is None: - continue - - metadata = sec_field.metadata.get("security") - if metadata is None: - continue - if metadata.get("option"): - _parse_security_option(headers, query_params, value) - return headers, query_params - if metadata.get("scheme"): - # Special case for basic auth which could be a flattened struct - if metadata.get("sub_type") == "basic" and not is_dataclass(value): - _parse_security_scheme(headers, query_params, metadata, security) - else: - _parse_security_scheme(headers, query_params, metadata, value) - - return headers, query_params - - -def _parse_security_option( - headers: Dict[str, str], query_params: Dict[str, str], option: Any -): - opt_fields: Tuple[Field, ...] = fields(option) - for opt_field in opt_fields: - metadata = opt_field.metadata.get("security") - if metadata is None or metadata.get("scheme") is None: - continue - _parse_security_scheme( - headers, query_params, metadata, getattr(option, opt_field.name) - ) - - -def _parse_security_scheme( - headers: Dict[str, str], - query_params: Dict[str, str], - scheme_metadata: Dict, - scheme: Any, -): - scheme_type = scheme_metadata.get("type") - sub_type = scheme_metadata.get("sub_type") - - if is_dataclass(scheme): - if scheme_type == "http" and sub_type == "basic": - _parse_basic_auth_scheme(headers, scheme) - return - - scheme_fields: Tuple[Field, ...] 
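generate_url (utils/url.py above) fills {parameter} templates from PathParamMetadata-annotated fields, falling back to globals, and joins the result onto the server URL. A sketch with a hypothetical path-parameter model:

from pydantic import BaseModel
from typing_extensions import Annotated

from dub.utils import FieldMetadata, PathParamMetadata, generate_url, remove_suffix

class GetLinkInfoPathParams(BaseModel):  # hypothetical model, the generated operations define these
    link_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]

print(generate_url("https://api.dub.co/", "/links/{link_id}", GetLinkInfoPathParams(link_id="link_123")))
# https://api.dub.co/links/link_123

print(remove_suffix("https://api.dub.co/", "/"))  # https://api.dub.co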
= fields(scheme) - for scheme_field in scheme_fields: - metadata = scheme_field.metadata.get("security") - if metadata is None or metadata.get("field_name") is None: - continue - - value = getattr(scheme, scheme_field.name) - - _parse_security_scheme_value( - headers, query_params, scheme_metadata, metadata, value - ) - else: - _parse_security_scheme_value( - headers, query_params, scheme_metadata, scheme_metadata, scheme - ) - - -def _parse_security_scheme_value( - headers: Dict[str, str], - query_params: Dict[str, str], - scheme_metadata: Dict, - security_metadata: Dict, - value: Any, -): - scheme_type = scheme_metadata.get("type") - sub_type = scheme_metadata.get("sub_type") - - header_name = str(security_metadata.get("field_name")) - - if scheme_type == "apiKey": - if sub_type == "header": - headers[header_name] = value - elif sub_type == "query": - query_params[header_name] = value - else: - raise Exception("not supported") - elif scheme_type == "openIdConnect": - headers[header_name] = _apply_bearer(value) - elif scheme_type == "oauth2": - if sub_type != "client_credentials": - headers[header_name] = _apply_bearer(value) - elif scheme_type == "http": - if sub_type == "bearer": - headers[header_name] = _apply_bearer(value) - else: - raise Exception("not supported") - else: - raise Exception("not supported") - - -def _apply_bearer(token: str) -> str: - return token.lower().startswith("bearer ") and token or f"Bearer {token}" - - -def _parse_basic_auth_scheme(headers: Dict[str, str], scheme: Any): - username = "" - password = "" - - scheme_fields: Tuple[Field, ...] = fields(scheme) - for scheme_field in scheme_fields: - metadata = scheme_field.metadata.get("security") - if metadata is None or metadata.get("field_name") is None: - continue - - field_name = metadata.get("field_name") - value = getattr(scheme, scheme_field.name) - - if field_name == "username": - username = value - if field_name == "password": - password = value - - data = f"{username}:{password}".encode() - headers["Authorization"] = f"Basic {base64.b64encode(data).decode()}" - - -def generate_url( - server_url: str, - path: str, - path_params: Any, - gbls: Optional[Any] = None, -) -> str: - path_param_values: Dict[str, str] = {} - - globals_already_populated = _populate_path_params( - path_params, gbls, path_param_values, [] - ) - if gbls is not None: - _populate_path_params(gbls, None, path_param_values, globals_already_populated) - - for key, value in path_param_values.items(): - path = path.replace("{" + key + "}", value, 1) - - return remove_suffix(server_url, "/") + path - - -def _populate_path_params( - path_params: Any, - gbls: Any, - path_param_values: Dict[str, str], - skip_fields: List[str], -) -> List[str]: - globals_already_populated: List[str] = [] - - path_param_fields: Tuple[Field, ...] 
= fields(path_params) - for field in path_param_fields: - if field.name in skip_fields: - continue - - param_metadata = field.metadata.get("path_param") - if param_metadata is None: - continue - - param = getattr(path_params, field.name) if path_params is not None else None - param, global_found = _populate_from_globals( - field.name, param, "path_param", gbls - ) - if global_found: - globals_already_populated.append(field.name) - - if param is None: - continue - - f_name = param_metadata.get("field_name", field.name) - serialization = param_metadata.get("serialization", "") - if serialization != "": - serialized_params = _get_serialized_params( - param_metadata, field.type, f_name, param - ) - for key, value in serialized_params.items(): - path_param_values[key] = value - else: - if param_metadata.get("style", "simple") == "simple": - if isinstance(param, List): - pp_vals: List[str] = [] - for pp_val in param: - if pp_val is None: - continue - pp_vals.append(_val_to_string(pp_val)) - path_param_values[param_metadata.get("field_name", field.name)] = ( - ",".join(pp_vals) - ) - elif isinstance(param, Dict): - pp_vals: List[str] = [] - for pp_key in param: - if param[pp_key] is None: - continue - if param_metadata.get("explode"): - pp_vals.append(f"{pp_key}={_val_to_string(param[pp_key])}") - else: - pp_vals.append(f"{pp_key},{_val_to_string(param[pp_key])}") - path_param_values[param_metadata.get("field_name", field.name)] = ( - ",".join(pp_vals) - ) - elif not isinstance(param, (str, int, float, complex, bool, Decimal)): - pp_vals: List[str] = [] - param_fields: Tuple[Field, ...] = fields(param) - for param_field in param_fields: - param_value_metadata = param_field.metadata.get("path_param") - if not param_value_metadata: - continue - - param_name = param_value_metadata.get("field_name", field.name) - - param_field_val = getattr(param, param_field.name) - if param_field_val is None: - continue - if param_metadata.get("explode"): - pp_vals.append( - f"{param_name}={_val_to_string(param_field_val)}" - ) - else: - pp_vals.append( - f"{param_name},{_val_to_string(param_field_val)}" - ) - path_param_values[param_metadata.get("field_name", field.name)] = ( - ",".join(pp_vals) - ) - else: - path_param_values[param_metadata.get("field_name", field.name)] = ( - _val_to_string(param) - ) - - return globals_already_populated - - -def is_optional(field): - return get_origin(field) is Union and type(None) in get_args(field) - - -def template_url(url_with_params: str, params: Dict[str, str]) -> str: - for key, value in params.items(): - url_with_params = url_with_params.replace("{" + key + "}", value) - - return url_with_params - - -def get_query_params( - query_params: Any, - gbls: Optional[Any] = None, -) -> Dict[str, List[str]]: - params: Dict[str, List[str]] = {} - - globals_already_populated = _populate_query_params(query_params, gbls, params, []) - if gbls is not None: - _populate_query_params(gbls, None, params, globals_already_populated) - - return params - - -def _populate_query_params( - query_params: Any, - gbls: Any, - query_param_values: Dict[str, List[str]], - skip_fields: List[str], -) -> List[str]: - globals_already_populated: List[str] = [] - - param_fields: Tuple[Field, ...] 
= fields(query_params) - for field in param_fields: - if field.name in skip_fields: - continue - - metadata = field.metadata.get("query_param") - if not metadata: - continue - - param_name = field.name - value = getattr(query_params, param_name) if query_params is not None else None - - value, global_found = _populate_from_globals( - param_name, value, "query_param", gbls - ) - if global_found: - globals_already_populated.append(param_name) - - f_name = metadata.get("field_name") - serialization = metadata.get("serialization", "") - if serialization != "": - serialized_parms = _get_serialized_params( - metadata, field.type, f_name, value - ) - for key, value in serialized_parms.items(): - if key in query_param_values: - query_param_values[key].extend(value) - else: - query_param_values[key] = [value] - else: - style = metadata.get("style", "form") - if style == "deepObject": - _populate_deep_object_query_params( - metadata, f_name, value, query_param_values - ) - elif style == "form": - _populate_delimited_query_params( - metadata, f_name, value, ",", query_param_values - ) - elif style == "pipeDelimited": - _populate_delimited_query_params( - metadata, f_name, value, "|", query_param_values - ) - else: - raise Exception("not yet implemented") - - return globals_already_populated - - -def get_headers(headers_params: Any, gbls: Optional[Any] = None) -> Dict[str, str]: - headers: Dict[str, str] = {} - - globals_already_populated = [] - if headers_params is not None: - globals_already_populated = _populate_headers(headers_params, gbls, headers, []) - if gbls is not None: - _populate_headers(gbls, None, headers, globals_already_populated) - - return headers - - -def _populate_headers( - headers_params: Any, - gbls: Any, - header_values: Dict[str, str], - skip_fields: List[str], -) -> List[str]: - globals_already_populated: List[str] = [] - - param_fields: Tuple[Field, ...] = fields(headers_params) - for field in param_fields: - if field.name in skip_fields: - continue - - metadata = field.metadata.get("header") - if not metadata: - continue - - value, global_found = _populate_from_globals( - field.name, getattr(headers_params, field.name), "header", gbls - ) - if global_found: - globals_already_populated.append(field.name) - value = _serialize_header(metadata.get("explode", False), value) - - if value != "": - header_values[metadata.get("field_name", field.name)] = value - - return globals_already_populated - - -def _get_serialized_params( - metadata: Dict, field_type: type, field_name: str, obj: Any -) -> Dict[str, str]: - params: Dict[str, str] = {} - - serialization = metadata.get("serialization", "") - if serialization == "json": - params[metadata.get("field_name", field_name)] = marshal_json(obj, field_type) - - return params - - -def _populate_deep_object_query_params( - metadata: Dict, field_name: str, obj: Any, params: Dict[str, List[str]] -): - if obj is None: - return - - if is_dataclass(obj): - obj_fields: Tuple[Field, ...] 
= fields(obj) - for obj_field in obj_fields: - obj_param_metadata = obj_field.metadata.get("query_param") - if not obj_param_metadata: - continue - - obj_val = getattr(obj, obj_field.name) - if obj_val is None: - continue - - if isinstance(obj_val, List): - for val in obj_val: - if val is None: - continue - - if ( - params.get( - f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]' - ) - is None - ): - params[ - f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]' - ] = [] - - params[ - f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]' - ].append(_val_to_string(val)) - else: - params[ - f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]' - ] = [_val_to_string(obj_val)] - elif isinstance(obj, Dict): - for key, value in obj.items(): - if value is None: - continue - - if isinstance(value, List): - for val in value: - if val is None: - continue - - if ( - params.get(f'{metadata.get("field_name", field_name)}[{key}]') - is None - ): - params[f'{metadata.get("field_name", field_name)}[{key}]'] = [] - - params[f'{metadata.get("field_name", field_name)}[{key}]'].append( - _val_to_string(val) - ) - else: - params[f'{metadata.get("field_name", field_name)}[{key}]'] = [ - _val_to_string(value) - ] - - -def _get_query_param_field_name(obj_field: Field) -> str: - obj_param_metadata = obj_field.metadata.get("query_param") - - if not obj_param_metadata: - return "" - - return obj_param_metadata.get("field_name", obj_field.name) - - -def _populate_delimited_query_params( - metadata: Dict, - field_name: str, - obj: Any, - delimiter: str, - query_param_values: Dict[str, List[str]], -): - _populate_form( - field_name, - metadata.get("explode", True), - obj, - _get_query_param_field_name, - delimiter, - query_param_values, - ) - - -SERIALIZATION_METHOD_TO_CONTENT_TYPE = { - "json": "application/json", - "form": "application/x-www-form-urlencoded", - "multipart": "multipart/form-data", - "raw": "application/octet-stream", - "string": "text/plain", -} - - -def serialize_request_body( - request: Any, - request_type: type, - request_field_name: str, - nullable: bool, - optional: bool, - serialization_method: str, - encoder=None, -) -> Tuple[Optional[str], Optional[Any], Optional[Any]]: - if request is None: - if not nullable and optional: - return None, None, None - - if not is_dataclass(request) or not hasattr(request, request_field_name): - return serialize_content_type( - request_field_name, - request_type, - SERIALIZATION_METHOD_TO_CONTENT_TYPE[serialization_method], - request, - encoder, - ) - - request_val = getattr(request, request_field_name) - - if request_val is None: - if not nullable and optional: - return None, None, None - - request_fields: Tuple[Field, ...] 
= fields(request) - request_metadata = None - - for field in request_fields: - if field.name == request_field_name: - request_metadata = field.metadata.get("request") - break - - if request_metadata is None: - raise Exception("invalid request type") - - return serialize_content_type( - request_field_name, - request_type, - request_metadata.get("media_type", "application/octet-stream"), - request_val, - ) - - -def serialize_content_type( - field_name: str, request_type: Any, media_type: str, request: Any, encoder=None -) -> Tuple[Optional[str], Optional[Any], Optional[List[List[Any]]]]: - if re.match(r"(application|text)\/.*?\+*json.*", media_type) is not None: - return media_type, marshal_json(request, request_type, encoder), None - if re.match(r"multipart\/.*", media_type) is not None: - return serialize_multipart_form(media_type, request) - if re.match(r"application\/x-www-form-urlencoded.*", media_type) is not None: - return media_type, serialize_form_data(field_name, request), None - if isinstance(request, (bytes, bytearray)): - return media_type, request, None - if isinstance(request, str): - return media_type, request, None - - raise Exception( - f"invalid request body type {type(request)} for mediaType {media_type}" - ) - - -def serialize_multipart_form( - media_type: str, request: Any -) -> Tuple[str, Any, List[List[Any]]]: - form: List[List[Any]] = [] - request_fields = fields(request) - - for field in request_fields: - val = getattr(request, field.name) - if val is None: - continue - - field_metadata = field.metadata.get("multipart_form") - if not field_metadata: - continue - - if field_metadata.get("file") is True: - file_fields = fields(val) - - file_name = "" - field_name = "" - content = bytes() - - for file_field in file_fields: - file_metadata = file_field.metadata.get("multipart_form") - if file_metadata is None: - continue - - if file_metadata.get("content") is True: - content = getattr(val, file_field.name) - else: - field_name = file_metadata.get("field_name", file_field.name) - file_name = getattr(val, file_field.name) - if field_name == "" or file_name == "" or content == bytes(): - raise Exception("invalid multipart/form-data file") - - form.append([field_name, [file_name, content]]) - elif field_metadata.get("json") is True: - to_append = [ - field_metadata.get("field_name", field.name), - [None, marshal_json(val, field.type), "application/json"], - ] - form.append(to_append) - else: - field_name = field_metadata.get("field_name", field.name) - if isinstance(val, List): - for value in val: - if value is None: - continue - form.append([field_name + "[]", [None, _val_to_string(value)]]) - else: - form.append([field_name, [None, _val_to_string(val)]]) - return media_type, None, form - - -def serialize_dict( - original: Dict, explode: bool, field_name, existing: Optional[Dict[str, List[str]]] -) -> Dict[str, List[str]]: - if existing is None: - existing = {} - - if explode is True: - for key, val in original.items(): - if key not in existing: - existing[key] = [] - existing[key].append(val) - else: - temp = [] - for key, val in original.items(): - temp.append(str(key)) - temp.append(str(val)) - if field_name not in existing: - existing[field_name] = [] - existing[field_name].append(",".join(temp)) - return existing - - -def serialize_form_data(field_name: str, data: Any) -> Dict[str, Any]: - form: Dict[str, List[str]] = {} - - if is_dataclass(data): - for field in fields(data): - val = getattr(data, field.name) - if val is None: - continue - - metadata = 
field.metadata.get("form") - if metadata is None: - continue - - field_name = metadata.get("field_name", field.name) - - if metadata.get("json"): - form[field_name] = [marshal_json(val, field.type)] - else: - if metadata.get("style", "form") == "form": - _populate_form( - field_name, - metadata.get("explode", True), - val, - _get_form_field_name, - ",", - form, - ) - else: - raise Exception(f"Invalid form style for field {field.name}") - elif isinstance(data, Dict): - for key, value in data.items(): - form[key] = [_val_to_string(value)] - else: - raise Exception(f"Invalid request body type for field {field_name}") - - return form - - -def _get_form_field_name(obj_field: Field) -> str: - obj_param_metadata = obj_field.metadata.get("form") - - if not obj_param_metadata: - return "" - - return obj_param_metadata.get("field_name", obj_field.name) - - -def _populate_form( - field_name: str, - explode: boolean, - obj: Any, - get_field_name_func: Callable, - delimiter: str, - form: Dict[str, List[str]], -): - if obj is None: - return form - - if is_dataclass(obj): - items = [] - - obj_fields: Tuple[Field, ...] = fields(obj) - for obj_field in obj_fields: - obj_field_name = get_field_name_func(obj_field) - if obj_field_name == "": - continue - - val = getattr(obj, obj_field.name) - if val is None: - continue - - if explode: - form[obj_field_name] = [_val_to_string(val)] - else: - items.append(f"{obj_field_name}{delimiter}{_val_to_string(val)}") - - if len(items) > 0: - form[field_name] = [delimiter.join(items)] - elif isinstance(obj, Dict): - items = [] - for key, value in obj.items(): - if value is None: - continue - - if explode: - form[key] = [_val_to_string(value)] - else: - items.append(f"{key}{delimiter}{_val_to_string(value)}") - - if len(items) > 0: - form[field_name] = [delimiter.join(items)] - elif isinstance(obj, List): - items = [] - - for value in obj: - if value is None: - continue - - if explode: - if not field_name in form: - form[field_name] = [] - form[field_name].append(_val_to_string(value)) - else: - items.append(_val_to_string(value)) - - if len(items) > 0: - form[field_name] = [delimiter.join([str(item) for item in items])] - else: - form[field_name] = [_val_to_string(obj)] - - return form - - -def _serialize_header(explode: bool, obj: Any) -> str: - if obj is None: - return "" - - if is_dataclass(obj): - items = [] - obj_fields: Tuple[Field, ...] 
= fields(obj) - for obj_field in obj_fields: - obj_param_metadata = obj_field.metadata.get("header") - - if not obj_param_metadata: - continue - - obj_field_name = obj_param_metadata.get("field_name", obj_field.name) - if obj_field_name == "": - continue - - val = getattr(obj, obj_field.name) - if val is None: - continue - - if explode: - items.append(f"{obj_field_name}={_val_to_string(val)}") - else: - items.append(obj_field_name) - items.append(_val_to_string(val)) - - if len(items) > 0: - return ",".join(items) - elif isinstance(obj, Dict): - items = [] - - for key, value in obj.items(): - if value is None: - continue - - if explode: - items.append(f"{key}={_val_to_string(value)}") - else: - items.append(key) - items.append(_val_to_string(value)) - - if len(items) > 0: - return ",".join([str(item) for item in items]) - elif isinstance(obj, List): - items = [] - - for value in obj: - if value is None: - continue - - items.append(_val_to_string(value)) - - if len(items) > 0: - return ",".join(items) - else: - return f"{_val_to_string(obj)}" - - return "" - - -def unmarshal_json(data, typ, decoder=None, infer_missing=False): - unmarshal = make_dataclass("Unmarshal", [("res", typ)], bases=(DataClassJsonMixin,)) - json_dict = json.loads(data) - try: - out = unmarshal.from_dict({"res": json_dict}, infer_missing=infer_missing) - except AttributeError as attr_err: - raise AttributeError( - f"unable to unmarshal {data} as {typ} - {attr_err}" - ) from attr_err - - return out.res if decoder is None else decoder(out.res) - - -def marshal_json(val, typ, encoder=None): - if not is_optional_type(typ) and val is None: - raise ValueError(f"Could not marshal None into non-optional type: {typ}") - - marshal = make_dataclass("Marshal", [("res", typ)], bases=(DataClassJsonMixin,)) - marshaller = marshal(res=val) - json_dict = marshaller.to_dict() - val = json_dict["res"] if encoder is None else encoder(json_dict["res"]) - - return json.dumps(val, separators=(",", ":"), sort_keys=True) - - -def match_content_type(content_type: str, pattern: str) -> boolean: - if pattern in (content_type, "*", "*/*"): - return True - - msg = Message() - msg["content-type"] = content_type - media_type = msg.get_content_type() - - if media_type == pattern: - return True - - parts = media_type.split("/") - if len(parts) == 2: - if pattern in (f"{parts[0]}/*", f"*/{parts[1]}"): - return True - - return False - - -def match_status_codes(status_codes: List[str], status_code: int) -> bool: - for code in status_codes: - if code == str(status_code): - return True - - if code.endswith("XX") and code.startswith(str(status_code)[:1]): - return True - return False - - -def datetimeisoformat(optional: bool): - def isoformatoptional(val): - if optional and val is None: - return None - return _val_to_string(val) - - return isoformatoptional - - -def dateisoformat(optional: bool): - def isoformatoptional(val): - if optional and val is None: - return None - return date.isoformat(val) - - return isoformatoptional - - -def datefromisoformat(date_str: str): - return dateutil.parser.parse(date_str).date() - - -def bigintencoder(optional: bool): - def bigintencode(val: int): - if optional and val is None: - return None - return str(val) - - return bigintencode - - -def bigintdecoder(val): - if val is None: - return None - - if isinstance(val, float): - raise ValueError(f"{val} is a float") - return int(val) - -def integerstrencoder(optional: bool): - def integerstrencode(val: int): - if optional and val is None: - return None - return str(val) - - 
return integerstrencode - - -def integerstrdecoder(val): - if val is None: - return None - - if isinstance(val, float): - raise ValueError(f"{val} is a float") - return int(val) - - -def numberstrencoder(optional: bool): - def numberstrencode(val: float): - if optional and val is None: - return None - return str(val) - - return numberstrencode - - -def numberstrdecoder(val): - if val is None: - return None - - return float(val) - - -def decimalencoder(optional: bool, as_str: bool): - def decimalencode(val: Decimal): - if optional and val is None: - return None - - if as_str: - return str(val) - - return float(val) - - return decimalencode - - -def decimaldecoder(val): - if val is None: - return None - - return Decimal(str(val)) - - -def map_encoder(optional: bool, value_encoder: Callable): - def map_encode(val: Dict): - if optional and val is None: - return None - - encoded = {} - for key, value in val.items(): - encoded[key] = value_encoder(value) - - return encoded - - return map_encode - - -def map_decoder(value_decoder: Callable): - def map_decode(val: Dict): - decoded = {} - for key, value in val.items(): - decoded[key] = value_decoder(value) - - return decoded - - return map_decode - - -def list_encoder(optional: bool, value_encoder: Callable): - def list_encode(val: List): - if optional and val is None: - return None - - encoded = [] - for value in val: - encoded.append(value_encoder(value)) - - return encoded - - return list_encode - - -def list_decoder(value_decoder: Callable): - def list_decode(val: List): - decoded = [] - for value in val: - decoded.append(value_decoder(value)) - - return decoded - - return list_decode - - -def union_encoder(all_encoders: Dict[str, Callable]): - def selective_encoder(val: Any): - if type(val) in all_encoders: - return all_encoders[type(val)](val) - return val - - return selective_encoder - - -def union_decoder(all_decoders: List[Callable]): - def selective_decoder(val: Any): - decoded = val - for decoder in all_decoders: - try: - decoded = decoder(val) - break - except (TypeError, ValueError): - continue - return decoded - - return selective_decoder - - -def get_field_name(name): - def override(_, _field_name=name): - return _field_name - - return override - - -def _val_to_string(val) -> str: - if isinstance(val, bool): - return str(val).lower() - if isinstance(val, datetime): - return str(val.isoformat().replace("+00:00", "Z")) - if isinstance(val, Enum): - return str(val.value) - - return str(val) - - -def _populate_from_globals( - param_name: str, value: Any, param_type: str, gbls: Any -) -> Tuple[Any, bool]: - if gbls is None: - return value, False - - global_fields = fields(gbls) - - found = False - for field in global_fields: - if field.name is not param_name: - continue - - found = True - - if value is not None: - return value, True - - global_value = getattr(gbls, field.name) - - param_metadata = field.metadata.get(param_type) - if param_metadata is None: - return value, True - - return global_value, True - - return value, found - - -def decoder_with_discriminator(field_name): - def decode_fx(obj): - kls = getattr(sys.modules["sdk.models.components"], obj[field_name]) - return unmarshal_json(json.dumps(obj), kls) - - return decode_fx - - -def remove_suffix(input_string, suffix): - if suffix and input_string.endswith(suffix): - return input_string[: -len(suffix)] - return input_string diff --git a/src/dub/utils/values.py b/src/dub/utils/values.py new file mode 100644 index 0000000..f3e159e --- /dev/null +++ b/src/dub/utils/values.py @@ 
-0,0 +1,110 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from datetime import datetime +from enum import Enum +from email.message import Message +from typing import Any, Dict, List, Tuple, Union + +from httpx import Response +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .serializers import marshal_json + +from .metadata import ParamMetadata, find_field_metadata + + +def match_content_type(content_type: str, pattern: str) -> bool: + if pattern in (content_type, "*", "*/*"): + return True + + msg = Message() + msg["content-type"] = content_type + media_type = msg.get_content_type() + + if media_type == pattern: + return True + + parts = media_type.split("/") + if len(parts) == 2: + if pattern in (f"{parts[0]}/*", f"*/{parts[1]}"): + return True + + return False + + +def match_status_codes(status_codes: List[str], status_code: int) -> bool: + if "default" in status_codes: + return True + + for code in status_codes: + if code == str(status_code): + return True + + if code.endswith("XX") and code.startswith(str(status_code)[:1]): + return True + return False + + +def match_response( + response: Response, code: Union[str, List[str]], content_type: str +) -> bool: + codes = code if isinstance(code, list) else [code] + return match_status_codes(codes, response.status_code) and match_content_type( + response.headers.get("content-type", "application/octet-stream"), content_type + ) + + +def _populate_from_globals( + param_name: str, value: Any, param_metadata_type: type, gbls: Any +) -> Tuple[Any, bool]: + if gbls is None: + return value, False + + if not isinstance(gbls, BaseModel): + raise TypeError("globals must be a pydantic model") + + global_fields: Dict[str, FieldInfo] = gbls.__class__.model_fields + found = False + for name in global_fields: + field = global_fields[name] + if name is not param_name: + continue + + found = True + + if value is not None: + return value, True + + global_value = getattr(gbls, name) + + param_metadata = find_field_metadata(field, param_metadata_type) + if param_metadata is None: + return value, True + + return global_value, True + + return value, found + + +def _val_to_string(val) -> str: + if isinstance(val, bool): + return str(val).lower() + if isinstance(val, datetime): + return str(val.isoformat().replace("+00:00", "Z")) + if isinstance(val, Enum): + return str(val.value) + + return str(val) + + +def _get_serialized_params( + metadata: ParamMetadata, field_name: str, obj: Any, typ: type +) -> Dict[str, str]: + params: Dict[str, str] = {} + + serialization = metadata.serialization + if serialization == "json": + params[field_name] = marshal_json(obj, typ) + + return params diff --git a/src/dub/workspaces.py b/src/dub/workspaces.py index 79c5850..32208f4 100644 --- a/src/dub/workspaces.py +++ b/src/dub/workspaces.py @@ -1,269 +1,329 @@ """Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" -import requests as requests_http -from .sdkconfiguration import SDKConfiguration -from dub import utils -from dub._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from .basesdk import BaseSDK +from dub._hooks import HookContext from dub.models import components, errors, operations -from typing import Optional +from dub.types import BaseModel +import dub.utils as utils +from typing import Optional, Union -class Workspaces: - sdk_configuration: SDKConfiguration - - def __init__(self, sdk_config: SDKConfiguration) -> None: - self.sdk_configuration = sdk_config - +class Workspaces(BaseSDK): - def get(self, request: operations.GetWorkspaceRequest) -> components.WorkspaceSchema: + def get( + self, *, + request: Union[operations.GetWorkspaceRequest, operations.GetWorkspaceRequestTypedDict], + server_url: Optional[str] = None, + ) -> components.WorkspaceSchema: r"""Retrieve a workspace + Retrieve a workspace for the authenticated user. + + :param request: The request object to send. + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='getWorkspace', oauth2_scopes=[], security_source=self.sdk_configuration.security) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url - url = utils.generate_url(base_url, '/workspaces/{idOrSlug}', request) + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetWorkspaceRequest) - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) + req = self.build_request( + method="GET", + path="/workspaces/{idOrSlug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + ) - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getWorkspace", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) - try: - req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.WorkspaceSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = 
utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def get_async( + self, *, + request: Union[operations.GetWorkspaceRequest, operations.GetWorkspaceRequestTypedDict], + server_url: Optional[str] = None, + ) -> components.WorkspaceSchema: + r"""Retrieve a workspace + + Retrieve a workspace for the authenticated user. - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - + :param request: The request object to send. 
+ :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetWorkspaceRequest) + req = self.build_request( + method="GET", + path="/workspaces/{idOrSlug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + ) - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.WorkspaceSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', 
http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) - + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getWorkspace", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.WorkspaceSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = 
utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) - def update(self, request: operations.UpdateWorkspaceRequest) -> components.WorkspaceSchema: + def update( + self, *, + id_or_slug: str, + request_body: Optional[Union[operations.UpdateWorkspaceRequestBody, operations.UpdateWorkspaceRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.WorkspaceSchema: r"""Update a workspace + Update a workspace by ID or slug. + + :param id_or_slug: The ID or slug of the workspace to update. + :param request_body: + :param server_url: Override the default server URL for this method """ - hook_ctx = HookContext(operation_id='updateWorkspace', oauth2_scopes=[], security_source=self.sdk_configuration.security) - base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + + request = operations.UpdateWorkspaceRequest( + id_or_slug=id_or_slug, + request_body=utils.unmarshal(request_body, operations.UpdateWorkspaceRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + + req = self.build_request( + method="PATCH", + path="/workspaces/{idOrSlug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateWorkspaceRequestBody]), + ) - url = utils.generate_url(base_url, '/workspaces/{idOrSlug}', request) + http_res = self.do_request( + hook_ctx=HookContext(operation_id="updateWorkspace", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) - if callable(self.sdk_configuration.security): - headers, query_params = utils.get_security(self.sdk_configuration.security()) - else: - headers, query_params = utils.get_security(self.sdk_configuration.security) - headers['Accept'] = 'application/json' - headers['user-agent'] = self.sdk_configuration.user_agent - client = self.sdk_configuration.client + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.WorkspaceSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = 
utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) - try: - req = client.prepare_request(requests_http.Request('PATCH', url, params=query_params, headers=headers)) - req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) - http_res = client.send(req) - except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) - if e is not None: - raise e + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + async def update_async( + self, *, + id_or_slug: str, + request_body: Optional[Union[operations.UpdateWorkspaceRequestBody, operations.UpdateWorkspaceRequestBodyTypedDict]] = None, + server_url: Optional[str] = None, + ) -> components.WorkspaceSchema: + r"""Update a workspace - if utils.match_status_codes(['400','401','403','404','409','410','422','429','4XX','500','5XX'], http_res.status_code): - result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) - if e is not None: - raise e - if result is not None: - http_res = result - else: - http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) - + Update a workspace by ID or slug. + + :param id_or_slug: The ID or slug of the workspace to update. 
+ :param request_body: + :param server_url: Override the default server URL for this method + """ + base_url = None + url_variables = None + if server_url is not None: + base_url = server_url + request = operations.UpdateWorkspaceRequest( + id_or_slug=id_or_slug, + request_body=utils.unmarshal(request_body, operations.UpdateWorkspaceRequestBody) if not isinstance(request_body, BaseModel) and request_body is not None else request_body, + ) + req = self.build_request( + method="PATCH", + path="/workspaces/{idOrSlug}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.request_body, False, True, "json", Optional[operations.UpdateWorkspaceRequestBody]), + ) - if http_res.status_code == 200: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[components.WorkspaceSchema]) - return out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.BadRequest) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 401: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Unauthorized) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Forbidden) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 404: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.NotFound) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 409: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.Conflict) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 410: - # pylint: 
disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InviteExpired) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 422: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.UnprocessableEntity) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 429: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.RateLimitExceeded) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 500: - # pylint: disable=no-else-return - if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): - out = utils.unmarshal_json(http_res.text, errors.InternalServerError) - raise out - - content_type = http_res.headers.get('Content-Type') - raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: - raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) - else: - raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) - + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="updateWorkspace", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","403","404","409","410","422","429","4XX","500","5XX"], + ) + + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[components.WorkspaceSchema]) + if utils.match_response(http_res, "400", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.BadRequestData) + raise errors.BadRequest(data=data) + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.UnauthorizedData) + raise errors.Unauthorized(data=data) + if utils.match_response(http_res, "403", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ForbiddenData) + raise errors.Forbidden(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.NotFoundData) + raise errors.NotFound(data=data) + if utils.match_response(http_res, "409", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.ConflictData) + raise errors.Conflict(data=data) + if utils.match_response(http_res, "410", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InviteExpiredData) + raise errors.InviteExpired(data=data) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, 
errors.UnprocessableEntityData) + raise errors.UnprocessableEntity(data=data) + if utils.match_response(http_res, "429", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.RateLimitExceededData) + raise errors.RateLimitExceeded(data=data) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, errors.InternalServerErrorData) + raise errors.InternalServerError(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise errors.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise errors.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) -
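
Usage sketch (editor's note, not part of the generated patch): the regenerated Workspaces sub-SDK above replaces the hand-rolled requests flow with BaseSDK.build_request/do_request, TypedDict-or-model request arguments, and typed error models raised from unmarshalled error payloads. The snippet below illustrates how the new surface might be called. The top-level Dub client class and its token keyword, the id_or_slug field on the get request, and the name field on UpdateWorkspaceRequestBody are assumptions inferred from this diff, not confirmed API details.

# Hypothetical usage of the regenerated Workspaces sub-SDK (assumptions noted above).
import dub
from dub.models import errors, operations

s = dub.Dub(token="DUB_API_KEY")  # assumed top-level client class and security kwarg

try:
    # get() accepts either the pydantic request model or a plain dict/TypedDict;
    # non-model input is unmarshalled into operations.GetWorkspaceRequest.
    ws = s.workspaces.get(request={"id_or_slug": "acme"})  # field name assumed from /workspaces/{idOrSlug}

    # update() flattens the path parameter and JSON body into keyword arguments.
    ws = s.workspaces.update(
        id_or_slug="acme",
        request_body=operations.UpdateWorkspaceRequestBody(name="Acme Inc."),  # 'name' field assumed
    )

    # An async variant is also generated, e.g. await s.workspaces.get_async(request={...}).
except errors.NotFound as err:
    # Documented 4xx/5xx responses are unmarshalled into *Data models and raised as typed errors.
    print(err.data)
except errors.SDKError as err:
    # Fallback for undocumented status codes or unexpected content types.
    print(err)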