lig-3245: Introduce new generated code (#1271)
Python API client generated with openapi-generator. This new code generator provides better support for validation and type hinting.
huan-lightly-0 authored Jun 7, 2023
1 parent 6457085 commit 1bf8d2b
Showing 322 changed files with 34,401 additions and 44,327 deletions.
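The migration pattern repeats throughout the diff below: model and API classes move out of the `swagger_client` package root into dedicated `models` and `api` subpackages, and request bodies are passed as keyword arguments named after their model instead of a generic `body`. A minimal sketch of the new style, assuming the generated models are pydantic-backed (suggested by the `__fields__` access further down) so that invalid values fail at construction time; `"my-dataset"` is a placeholder name:

from lightly.openapi_generated.swagger_client.api import DatasetsApi
from lightly.openapi_generated.swagger_client.models import (
    DatasetCreateRequest,
    DatasetType,
)

# Typed request model: with pydantic-backed generated code, a wrong field
# value raises a validation error here instead of surfacing server-side.
request = DatasetCreateRequest(name="my-dataset", type=DatasetType.IMAGES)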
2 changes: 1 addition & 1 deletion .coveragerc
@@ -1,3 +1,3 @@
 [run]
 omit =
-    lightly/openapi_generated/*
+    lightly/openapi_generated/*
2 changes: 1 addition & 1 deletion .gitattributes
@@ -1 +1 @@
-lightly/openapi_generated linguist-generated=true
+lightly/openapi_generated linguist-generated=true
4 changes: 3 additions & 1 deletion lightly/api/__init__.py
@@ -5,7 +5,9 @@
 from lightly.api import patch as _patch
 from lightly.api.api_workflow_artifacts import ArtifactNotExist
 from lightly.api.api_workflow_client import ApiWorkflowClient
-from lightly.openapi_generated.swagger_client import Configuration as _Configuration
+from lightly.openapi_generated.swagger_client.api_client import (
+    Configuration as _Configuration,
+)

 # Make ApiWorkflowClient and swagger classes picklable.
 _patch.make_swagger_configuration_picklable(
6 changes: 2 additions & 4 deletions lightly/api/api_workflow_artifacts.py
@@ -1,12 +1,10 @@
 import os

 from lightly.api import download
-from lightly.openapi_generated.swagger_client import (
+from lightly.openapi_generated.swagger_client.models import (
     DockerRunArtifactData,
-    DockerRunData,
-)
-from lightly.openapi_generated.swagger_client.models.docker_run_artifact_type import (
     DockerRunArtifactType,
+    DockerRunData,
 )
8 changes: 3 additions & 5 deletions lightly/api/api_workflow_client.py
@@ -31,11 +31,8 @@
     LightlyAPITimeoutException,
     is_compatible_version,
 )
-from lightly.openapi_generated.swagger_client import (
-    ApiClient,
+from lightly.openapi_generated.swagger_client.api import (
     CollaborationApi,
-    Creator,
-    DatasetData,
     DatasetsApi,
     DatasourcesApi,
     DockerApi,
@@ -50,6 +47,7 @@
     ScoresApi,
     TagsApi,
 )
+from lightly.openapi_generated.swagger_client.models import Creator, DatasetData
 from lightly.openapi_generated.swagger_client.rest import ApiException
 from lightly.utils.reordering import sort_items_by_keys
@@ -125,7 +123,7 @@ def __init__(
         self.api_client = LightlySwaggerApiClient(configuration=configuration)
         self.api_client.user_agent = f"Lightly/{__version__} ({platform.system()}/{platform.release()}; {platform.platform()}; {platform.processor()};) python/{platform.python_version()}"

-        self.token = configuration.api_key["token"]
+        self.token = configuration.api_key["ApiKeyAuth"]
         if dataset_id is not None:
             self._dataset_id = dataset_id
         if embedding_id is not None:
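Note the key change in __init__: with the new generator, the api_key dictionary is indexed by the OpenAPI security-scheme name ("ApiKeyAuth") rather than the old parameter name ("token"). A minimal sketch of configuring a client by hand under the new scheme; the no-argument Configuration() constructor follows openapi-generator's usual defaults, and the token value is a placeholder:

from lightly.openapi_generated.swagger_client.api_client import Configuration

configuration = Configuration()
# New generator: index by security-scheme name, not by parameter name.
configuration.api_key["ApiKeyAuth"] = "YOUR_TOKEN"  # placeholder token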
8 changes: 2 additions & 6 deletions lightly/api/api_workflow_collaboration.py
@@ -1,12 +1,8 @@
 from typing import List

-from lightly.openapi_generated.swagger_client.models.shared_access_config_create_request import (
+from lightly.openapi_generated.swagger_client.models import (
     SharedAccessConfigCreateRequest,
-)
-from lightly.openapi_generated.swagger_client.models.shared_access_config_data import (
     SharedAccessConfigData,
-)
-from lightly.openapi_generated.swagger_client.models.shared_access_type import (
     SharedAccessType,
 )
@@ -45,7 +41,7 @@ def share_dataset_only_with(self, dataset_id: str, user_emails: List[str]) -> None:
             access_type=SharedAccessType.WRITE, users=user_emails, creator=self._creator
         )
         self._collaboration_api.create_or_update_shared_access_config_by_dataset_id(
-            body=body, dataset_id=dataset_id
+            shared_access_config_create_request=body, dataset_id=dataset_id
         )

     def get_shared_users(self, dataset_id: str) -> List[str]:
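The rename from body= to shared_access_config_create_request= is the new generator's convention: each operation's body parameter is named after its request model. A hedged sketch of the same call as above, issued directly against the generated API; collaboration_api, dataset_id, and creator stand in for values the workflow client normally supplies, and the email is a placeholder:

from lightly.openapi_generated.swagger_client.models import (
    SharedAccessConfigCreateRequest,
    SharedAccessType,
)

# Build the typed request model, then pass it under its model-derived
# keyword instead of the old generic `body=`.
body = SharedAccessConfigCreateRequest(
    access_type=SharedAccessType.WRITE,
    users=["colleague@example.com"],  # placeholder email
    creator=creator,
)
collaboration_api.create_or_update_shared_access_config_by_dataset_id(
    shared_access_config_create_request=body,
    dataset_id=dataset_id,
)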
17 changes: 6 additions & 11 deletions lightly/api/api_workflow_compute_worker.py
@@ -7,8 +7,8 @@

 from lightly.api import utils
 from lightly.api.utils import retry
-from lightly.openapi_generated.swagger_client import (
-    ApiClient,
+from lightly.openapi_generated.swagger_client.api_client import ApiClient
+from lightly.openapi_generated.swagger_client.models import (
     CreateDockerWorkerRegistryEntryRequest,
     DockerRunData,
     DockerRunScheduledCreateRequest,
@@ -308,7 +308,7 @@ def schedule_compute_worker_run(
             creator=self._creator,
         )
         response = self._compute_worker_api.create_docker_run_scheduled_by_dataset_id(
-            body=request,
+            docker_run_scheduled_create_request=request,
             dataset_id=self.dataset_id,
         )
         return response.id
@@ -673,24 +673,19 @@ def _validate_config(
     Recursively checks if the keys in the cfg dictionary match the attributes of
     the DockerWorkerConfigV2Docker/DockerWorkerConfigV2Lightly instances. If not,
-    suggests a best match based on the keys in 'swagger_types'.
+    suggests a best match.
+
     Raises:
-        TypeError: If obj is not of swagger type.
         InvalidConfigurationError: If obj is not a valid config.
     """

     if cfg is None:
         return

-    if not hasattr(type(obj), "swagger_types"):
-        raise TypeError(
-            f"Type {type(obj)} of argument 'obj' has not attribute 'swagger_types'"
-        )
-
     for key, item in cfg.items():
         if not hasattr(obj, key):
-            possible_options = list(type(obj).swagger_types.keys())
+            possible_options = list(obj.__fields__.keys())
             closest_match = difflib.get_close_matches(
                 word=key, possibilities=possible_options, n=1, cutoff=0.0
             )[0]
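_validate_config keeps its best-match suggestion, but the candidate attribute names now come from pydantic's __fields__ mapping instead of the old generator's swagger_types dict. A standalone sketch of that suggestion logic, using difflib exactly as the hunk above does; closest_field is a hypothetical helper name:

import difflib

def closest_field(key: str, model) -> str:
    # Pydantic v1 models expose their declared fields via `__fields__`.
    possible_options = list(model.__fields__.keys())
    # With cutoff=0.0 and a non-empty candidate list, get_close_matches
    # always returns at least one entry, so [0] is a safe best guess.
    return difflib.get_close_matches(
        word=key, possibilities=possible_options, n=1, cutoff=0.0
    )[0]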
8 changes: 5 additions & 3 deletions lightly/api/api_workflow_datasets.py
@@ -3,7 +3,7 @@
 from typing import Iterator, List, Optional

 from lightly.api import utils
-from lightly.openapi_generated.swagger_client import (
+from lightly.openapi_generated.swagger_client.models import (
     CreateEntityResponse,
     DatasetCreateRequest,
     DatasetData,
@@ -292,7 +292,7 @@
         Examples:
             >>> from lightly.api import ApiWorkflowClient
-            >>> from lightly.openapi_generated.swagger_client.models.dataset_type import DatasetType
+            >>> from lightly.openapi_generated.swagger_client.models import DatasetType
             >>>
             >>> client = lightly.api.ApiWorkflowClient(token="YOUR_TOKEN")
             >>> client.create_dataset('your-dataset-name', dataset_type=DatasetType.IMAGES)
@@ -337,7 +337,9 @@ def _create_dataset_without_check_existing(
         body = DatasetCreateRequest(
             name=dataset_name, type=dataset_type, creator=self._creator
         )
-        response: CreateEntityResponse = self._datasets_api.create_dataset(body=body)
+        response: CreateEntityResponse = self._datasets_api.create_dataset(
+            dataset_create_request=body
+        )
         self._dataset_id = response.id

     def create_new_dataset_with_unique_name(
139 changes: 72 additions & 67 deletions lightly/api/api_workflow_datasources.py
@@ -4,20 +4,12 @@

 import tqdm

-from lightly.openapi_generated.swagger_client import DatasourceConfigVerifyDataErrors
-from lightly.openapi_generated.swagger_client.models.datasource_config import (
+from lightly.openapi_generated.swagger_client.models import (
     DatasourceConfig,
-)
-from lightly.openapi_generated.swagger_client.models.datasource_processed_until_timestamp_request import (
+    DatasourceConfigVerifyDataErrors,
     DatasourceProcessedUntilTimestampRequest,
-)
-from lightly.openapi_generated.swagger_client.models.datasource_processed_until_timestamp_response import (
     DatasourceProcessedUntilTimestampResponse,
-)
-from lightly.openapi_generated.swagger_client.models.datasource_purpose import (
     DatasourcePurpose,
-)
-from lightly.openapi_generated.swagger_client.models.datasource_raw_samples_data import (
     DatasourceRawSamplesData,
 )
@@ -47,7 +39,7 @@ def _download_raw_files(

         response: DatasourceRawSamplesData = download_function(
             dataset_id=self.dataset_id,
-            _from=from_,
+            var_from=from_,
             to=to,
             use_redirected_read_url=use_redirected_read_url,
             **relevant_filenames_kwargs,
@@ -411,7 +403,8 @@ def update_processed_until_timestamp(self, timestamp: int) -> None:
             processed_until_timestamp=timestamp
         )
         self._datasources_api.update_datasource_processed_until_timestamp_by_dataset_id(
-            dataset_id=self.dataset_id, body=body
+            dataset_id=self.dataset_id,
+            datasource_processed_until_timestamp_request=body,
         )

     def get_datasource(self) -> DatasourceConfig:
@@ -461,14 +454,16 @@ def set_azure_config(
         """
         # TODO: Use DatasourceConfigAzure once we switch/update the api generator.
         self._datasources_api.update_datasource_by_dataset_id(
-            body={
-                "type": "AZURE",
-                "fullPath": container_name,
-                "thumbSuffix": thumbnail_suffix,
-                "accountName": account_name,
-                "accountKey": sas_token,
-                "purpose": purpose,
-            },
+            datasource_config=DatasourceConfig.from_dict(
+                {
+                    "type": "AZURE",
+                    "fullPath": container_name,
+                    "thumbSuffix": thumbnail_suffix,
+                    "accountName": account_name,
+                    "accountKey": sas_token,
+                    "purpose": purpose,
+                }
+            ),
             dataset_id=self.dataset_id,
         )
@@ -509,14 +504,16 @@ def set_gcs_config(
         """
         # TODO: Use DatasourceConfigGCS once we switch/update the api generator.
         self._datasources_api.update_datasource_by_dataset_id(
-            body={
-                "type": "GCS",
-                "fullPath": resource_path,
-                "thumbSuffix": thumbnail_suffix,
-                "gcsProjectId": project_id,
-                "gcsCredentials": credentials,
-                "purpose": purpose,
-            },
+            datasource_config=DatasourceConfig.from_dict(
+                {
+                    "type": "GCS",
+                    "fullPath": resource_path,
+                    "thumbSuffix": thumbnail_suffix,
+                    "gcsProjectId": project_id,
+                    "gcsCredentials": credentials,
+                    "purpose": purpose,
+                }
+            ),
             dataset_id=self.dataset_id,
         )
@@ -543,12 +540,14 @@ def set_local_config(
         """
         # TODO: Use DatasourceConfigLocal once we switch/update the api generator.
         self._datasources_api.update_datasource_by_dataset_id(
-            body={
-                "type": "LOCAL",
-                "fullPath": resource_path,
-                "thumbSuffix": thumbnail_suffix,
-                "purpose": DatasourcePurpose.INPUT_OUTPUT,
-            },
+            datasource_config=DatasourceConfig.from_dict(
+                {
+                    "type": "LOCAL",
+                    "fullPath": resource_path,
+                    "thumbSuffix": thumbnail_suffix,
+                    "purpose": DatasourcePurpose.INPUT_OUTPUT,
+                }
+            ),
             dataset_id=self.dataset_id,
         )
@@ -590,15 +589,17 @@ def set_s3_config(
         """
         # TODO: Use DatasourceConfigS3 once we switch/update the api generator.
         self._datasources_api.update_datasource_by_dataset_id(
-            body={
-                "type": "S3",
-                "fullPath": resource_path,
-                "thumbSuffix": thumbnail_suffix,
-                "s3Region": region,
-                "s3AccessKeyId": access_key,
-                "s3SecretAccessKey": secret_access_key,
-                "purpose": purpose,
-            },
+            datasource_config=DatasourceConfig.from_dict(
+                {
+                    "type": "S3",
+                    "fullPath": resource_path,
+                    "thumbSuffix": thumbnail_suffix,
+                    "s3Region": region,
+                    "s3AccessKeyId": access_key,
+                    "s3SecretAccessKey": secret_access_key,
+                    "purpose": purpose,
+                }
+            ),
             dataset_id=self.dataset_id,
         )
@@ -640,15 +641,17 @@ def set_s3_delegated_access_config(
         """
         # TODO: Use DatasourceConfigS3 once we switch/update the api generator.
         self._datasources_api.update_datasource_by_dataset_id(
-            body={
-                "type": "S3DelegatedAccess",
-                "fullPath": resource_path,
-                "thumbSuffix": thumbnail_suffix,
-                "s3Region": region,
-                "s3ARN": role_arn,
-                "s3ExternalId": external_id,
-                "purpose": purpose,
-            },
+            datasource_config=DatasourceConfig.from_dict(
+                {
+                    "type": "S3DelegatedAccess",
+                    "fullPath": resource_path,
+                    "thumbSuffix": thumbnail_suffix,
+                    "s3Region": region,
+                    "s3ARN": role_arn,
+                    "s3ExternalId": external_id,
+                    "purpose": purpose,
+                }
+            ),
             dataset_id=self.dataset_id,
         )
@@ -686,15 +689,17 @@ def set_obs_config(
         """
         # TODO: Use DatasourceConfigOBS once we switch/update the api generator.
         self._datasources_api.update_datasource_by_dataset_id(
-            body={
-                "type": "OBS",
-                "fullPath": resource_path,
-                "thumbSuffix": thumbnail_suffix,
-                "obsEndpoint": obs_endpoint,
-                "obsAccessKeyId": obs_access_key_id,
-                "obsSecretAccessKey": obs_secret_access_key,
-                "purpose": purpose,
-            },
+            datasource_config=DatasourceConfig.from_dict(
+                {
+                    "type": "OBS",
+                    "fullPath": resource_path,
+                    "thumbSuffix": thumbnail_suffix,
+                    "obsEndpoint": obs_endpoint,
+                    "obsAccessKeyId": obs_access_key_id,
+                    "obsSecretAccessKey": obs_secret_access_key,
+                    "purpose": purpose,
+                }
+            ),
             dataset_id=self.dataset_id,
         )
@@ -714,8 +719,8 @@ def get_prediction_read_url(
         """
         return self._datasources_api.get_prediction_file_read_url_from_datasource_by_dataset_id(
-            self.dataset_id,
-            filename,
+            dataset_id=self.dataset_id,
+            file_name=filename,
         )

     def get_metadata_read_url(
@@ -734,8 +739,8 @@ def get_metadata_read_url(
         """
         return self._datasources_api.get_metadata_file_read_url_from_datasource_by_dataset_id(
-            self.dataset_id,
-            filename,
+            dataset_id=self.dataset_id,
+            file_name=filename,
         )

     def get_custom_embedding_read_url(
@@ -754,8 +759,8 @@ def get_custom_embedding_read_url(
         """
         return self._datasources_api.get_custom_embedding_file_read_url_from_datasource_by_dataset_id(
-            self.dataset_id,
-            filename,
+            dataset_id=self.dataset_id,
+            file_name=filename,
         )

     def list_datasource_permissions(
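Two recurring changes in this file are worth isolating. First, positional and `body=` arguments become explicit keywords (`dataset_id=`, `file_name=`, `datasource_config=`), and `_from` becomes `var_from` because `from` is a reserved word in Python. Second, the datasource setters now wrap their raw dicts in DatasourceConfig.from_dict, which parses the payload into the typed model before anything is sent. A hedged sketch of that wrapping, with field names taken from the S3 hunk above and all values placeholders:

from lightly.openapi_generated.swagger_client.models import DatasourceConfig

# from_dict parses and validates the payload up front, so a misspelled
# field or a wrong type fails here rather than inside the API call.
# All values below are placeholders.
config = DatasourceConfig.from_dict(
    {
        "type": "S3",
        "fullPath": "s3://my-bucket/my-dataset/",
        "thumbSuffix": ".lightly/thumbnails/[filename]_thumb.[extension]",
        "s3Region": "eu-central-1",
        "s3AccessKeyId": "MY_KEY_ID",
        "s3SecretAccessKey": "MY_SECRET",
        "purpose": "INPUT_OUTPUT",
    }
)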