
Commit

fix pytests and linter
Signed-off-by: ravi-kumar-pilla <[email protected]>
ravi-kumar-pilla committed May 17, 2024
1 parent 2f67e78 commit 12eabd1
Showing 11 changed files with 51 additions and 34 deletions.
8 changes: 4 additions & 4 deletions package/kedro_viz/api/rest/responses.py
@@ -421,11 +421,11 @@ def save_api_main_response_to_fs(main_path: str, remote_fs: Any):


def save_api_node_response_to_fs(
nodes_path: str, remote_fs: Any, is_datasets_previewed: bool
nodes_path: str, remote_fs: Any, are_datasets_previewable: bool
):
"""Saves API /nodes/{node} response to a directory."""
# Set if preview is enabled/disabled for all data nodes
DataNodeMetadata.set_is_datasets_previewed(is_datasets_previewed)
DataNodeMetadata.set_are_datasets_previewable(are_datasets_previewable)

for nodeId in data_access_manager.nodes.get_node_ids():
try:
@@ -457,7 +457,7 @@ def save_api_pipeline_response_to_fs(pipelines_path: str, remote_fs: Any):
raise exc


def save_api_responses_to_fs(path: str, remote_fs: Any, is_datasets_previewed: bool):
def save_api_responses_to_fs(path: str, remote_fs: Any, are_datasets_previewable: bool):
"""Saves all Kedro Viz API responses to a directory."""
try:
logger.debug(
@@ -475,7 +475,7 @@ def save_api_responses_to_fs(path: str, remote_fs: Any, is_datasets_previewed: b
remote_fs.makedirs(pipelines_path, exist_ok=True)

save_api_main_response_to_fs(main_path, remote_fs)
save_api_node_response_to_fs(nodes_path, remote_fs, is_datasets_previewed)
save_api_node_response_to_fs(nodes_path, remote_fs, are_datasets_previewable)
save_api_pipeline_response_to_fs(pipelines_path, remote_fs)

except Exception as exc: # pragma: no cover
8 changes: 4 additions & 4 deletions package/kedro_viz/integrations/deployment/base_deployer.py
@@ -33,9 +33,9 @@ def __init__(self):
self._path = None
self._fs = None

def _upload_api_responses(self, is_datasets_previewed: bool):
def _upload_api_responses(self, are_datasets_previewable: bool):
"""Write API responses to the build."""
save_api_responses_to_fs(self._path, self._fs, is_datasets_previewed)
save_api_responses_to_fs(self._path, self._fs, are_datasets_previewable)

def _ingest_heap_analytics(self):
"""Ingest heap analytics to index file in the build."""
@@ -98,9 +98,9 @@ def _upload_deploy_viz_metadata_file(self):
logger.exception("Upload failed: %s ", exc)
raise exc

def deploy(self, is_datasets_previewed: bool = False):
def deploy(self, are_datasets_previewable: bool = False):
"""Create and deploy all static files to local/remote file system"""

self._upload_api_responses(is_datasets_previewed)
self._upload_api_responses(are_datasets_previewable)
self._upload_static_files(_HTML_DIR)
self._upload_deploy_viz_metadata_file()
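
The renamed flag travels the same path as before, just under a clearer name: deploy() hands it to _upload_api_responses(), which forwards it to save_api_responses_to_fs() and on to DataNodeMetadata. A minimal usage sketch under assumptions not in this commit (the DeployerFactory import path, the "aws" platform string, and the placeholder endpoint and bucket values):

from kedro_viz.integrations.deployment.deployer_factory import DeployerFactory

# Placeholder endpoint/bucket values for illustration only.
deployer = DeployerFactory.create_deployer("aws", "http://example-endpoint", "my-bucket")

# The flag is forwarded via _upload_api_responses -> save_api_responses_to_fs,
# which passes it down to save_api_node_response_to_fs and
# DataNodeMetadata.set_are_datasets_previewable(...).
deployer.deploy(are_datasets_previewable=False)
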
8 changes: 4 additions & 4 deletions package/kedro_viz/launchers/cli.py
@@ -285,7 +285,7 @@ def build(include_hooks, preview):

def create_shareableviz_process(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint=None,
bucket_name=None,
include_hooks=False,
@@ -299,7 +299,7 @@ def create_shareableviz_process(
target=load_and_deploy_viz,
args=(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint,
bucket_name,
include_hooks,
@@ -375,7 +375,7 @@ def create_shareableviz_process(

def load_and_deploy_viz(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint,
bucket_name,
include_hooks,
@@ -391,7 +391,7 @@ def load_and_deploy_viz(

# Start the deployment
deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
deployer.deploy(is_datasets_previewed)
deployer.deploy(are_datasets_previewable)

except (
# pylint: disable=catching-non-exception
14 changes: 9 additions & 5 deletions package/kedro_viz/models/flowchart.py
@@ -735,7 +735,7 @@ class DataNodeMetadata(GraphNodeMetadata):

data_node: DataNode = Field(..., exclude=True)

is_datasets_previewed: ClassVar[bool] = True
are_datasets_previewable: ClassVar[bool] = True

type: Optional[str] = Field(
default=None, validate_default=True, description="The type of the data node"
@@ -779,8 +779,8 @@ def check_data_node_exists(cls, values):
return values

@classmethod
def set_is_datasets_previewed(cls, value: bool):
cls.is_datasets_previewed = value
def set_are_datasets_previewable(cls, value: bool):
cls.are_datasets_previewable = value

@classmethod
def set_data_node_and_dataset(cls, data_node):
@@ -815,7 +815,7 @@ def set_preview(cls, _):
if (
cls.data_node.is_preview_disabled()
or not hasattr(cls.dataset, "preview")
or not cls.is_datasets_previewed
or not cls.are_datasets_previewable
):
return None

@@ -839,7 +839,11 @@ def set_preview(cls, _):
@field_validator("preview_type")
@classmethod
def set_preview_type(cls, _):
if cls.data_node.is_preview_disabled() or not hasattr(cls.dataset, "preview"):
if (
cls.data_node.is_preview_disabled()
or not hasattr(cls.dataset, "preview")
or not cls.are_datasets_previewable
):
return None

try:
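
With preview_type now gated on the same class-level flag as preview, disabling previews blanks out both fields. A minimal sketch of the behaviour (it mirrors the test added to test_flowchart.py further down; example_data_node stands in for any DataNode fixture):

from kedro_viz.models.flowchart import DataNodeMetadata

DataNodeMetadata.set_are_datasets_previewable(False)
metadata = DataNodeMetadata(data_node=example_data_node)

assert metadata.preview is None
assert metadata.preview_type is None
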
7 changes: 6 additions & 1 deletion package/tests/conftest.py
@@ -17,7 +17,7 @@
from kedro_viz.data_access import DataAccessManager
from kedro_viz.integrations.kedro.hooks import DatasetStatsHook
from kedro_viz.integrations.kedro.sqlite_store import SQLiteStore
from kedro_viz.models.flowchart import GraphNode
from kedro_viz.models.flowchart import DataNodeMetadata, GraphNode
from kedro_viz.server import populate_data


@@ -350,3 +350,8 @@ def pipeline_with_data_sets_mock():
pipeline = mock.MagicMock()
pipeline.data_sets.return_value = ["model_inputs#csv"]
return pipeline


@pytest.fixture(autouse=True)
def reset_are_datasets_previewable():
DataNodeMetadata.are_datasets_previewable = True
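
Because are_datasets_previewable is a ClassVar on DataNodeMetadata, a value set in one test would otherwise leak into later tests; the autouse fixture above resets it to True before each test runs. A sketch of the ordering problem it guards against, using hypothetical test names:

def test_previews_disabled(example_data_node):
    DataNodeMetadata.set_are_datasets_previewable(False)
    assert DataNodeMetadata(data_node=example_data_node).preview is None


def test_previews_enabled(example_data_node):
    # Without the autouse reset, the False set by the previous test would still apply here.
    assert DataNodeMetadata(data_node=example_data_node).preview is not None
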
8 changes: 4 additions & 4 deletions package/tests/test_api/test_rest/test_responses.py
@@ -1031,15 +1031,15 @@ def test_save_api_pipeline_response_to_fs(self, mocker):
mock_write_api_response_to_fs.assert_has_calls(expected_calls, any_order=True)

@pytest.mark.parametrize(
"file_path, protocol, is_datasets_previewed",
"file_path, protocol, are_datasets_previewable",
[
("s3://shareableviz", "s3", True),
("abfs://shareableviz", "abfs", False),
("shareableviz", "file", True),
],
)
def test_save_api_responses_to_fs(
self, file_path, protocol, is_datasets_previewed, mocker
self, file_path, protocol, are_datasets_previewable, mocker
):
mock_api_main_response_to_fs = mocker.patch(
"kedro_viz.api.rest.responses.save_api_main_response_to_fs"
@@ -1055,7 +1055,7 @@ def test_save_api_responses_to_fs(
mock_filesystem.return_value.protocol = protocol

save_api_responses_to_fs(
file_path, mock_filesystem.return_value, is_datasets_previewed
file_path, mock_filesystem.return_value, are_datasets_previewable
)

mock_api_main_response_to_fs.assert_called_once_with(
@@ -1064,7 +1064,7 @@ def test_save_api_responses_to_fs(
mock_api_node_response_to_fs.assert_called_once_with(
f"{file_path}/api/nodes",
mock_filesystem.return_value,
is_datasets_previewed,
are_datasets_previewable,
)
mock_api_pipeline_response_to_fs.assert_called_once_with(
f"{file_path}/api/pipelines", mock_filesystem.return_value
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_azure_deployer.py
@@ -26,7 +26,7 @@ def mock_file_system(mocker):


class TestAzureDeployer:
def test_deploy(self, endpoint, bucket_name, mocker):
def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system):
deployer = AzureDeployer(endpoint, bucket_name)

mocker.patch.object(deployer, "_upload_api_responses")
3 changes: 2 additions & 1 deletion package/tests/test_integrations/test_deployer_factory.py
@@ -18,7 +18,8 @@
("gcp", "http://mocked-url.com", "shareableviz", GCPDeployer),
],
)
def test_create_deployer(platform, endpoint, bucket_name, deployer_class):
def test_create_deployer(platform, endpoint, bucket_name, deployer_class, mocker):
mocker.patch("fsspec.filesystem")
deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
assert isinstance(deployer, deployer_class)
assert deployer._endpoint == endpoint
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_gcp_deployer.py
@@ -21,7 +21,7 @@ def mock_file_system(mocker):


class TestGCPDeployer:
def test_deploy(self, endpoint, bucket_name, mocker):
def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system):
deployer = GCPDeployer(endpoint, bucket_name)

mocker.patch.object(deployer, "_upload_api_responses")
16 changes: 8 additions & 8 deletions package/tests/test_launchers/test_cli.py
@@ -588,7 +588,7 @@ def test_successful_build_with_existing_static_files(


@pytest.mark.parametrize(
"platform, is_datasets_previewed, endpoint, bucket_name,"
"platform, are_datasets_previewable, endpoint, bucket_name,"
"include_hooks, process_completed_value",
[
(
@@ -645,7 +645,7 @@ def test_successful_build_with_existing_static_files(
)
def test_create_shareableviz_process(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint,
bucket_name,
include_hooks,
@@ -659,15 +659,15 @@
):
mock_process_completed.return_value.value = process_completed_value
cli.create_shareableviz_process(
platform, is_datasets_previewed, endpoint, bucket_name, include_hooks
platform, are_datasets_previewable, endpoint, bucket_name, include_hooks
)

# Assert the mocks were called as expected
mock_viz_deploy_process.assert_called_once_with(
target=mock_viz_load_and_deploy,
args=(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint,
bucket_name,
include_hooks,
@@ -707,7 +707,7 @@ def test_create_shareableviz_process(


@pytest.mark.parametrize(
"platform, is_datasets_previewed, endpoint, bucket_name, include_hooks, package_name",
"platform, are_datasets_previewable, endpoint, bucket_name, include_hooks, package_name",
[
(
"azure",
@@ -731,7 +731,7 @@
)
def test_load_and_deploy_viz_success(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint,
bucket_name,
include_hooks,
@@ -747,7 +747,7 @@

cli.load_and_deploy_viz(
platform,
is_datasets_previewed,
are_datasets_previewable,
endpoint,
bucket_name,
include_hooks,
@@ -762,5 +762,5 @@ def test_load_and_deploy_viz_success(
mock_DeployerFactory.create_deployer.assert_called_once_with(
platform, endpoint, bucket_name
)
deployer_mock.deploy.assert_called_once_with(is_datasets_previewed)
deployer_mock.deploy.assert_called_once_with(are_datasets_previewable)
mock_click_echo.echo.assert_not_called()
9 changes: 8 additions & 1 deletion package/tests/test_models/test_flowchart.py
@@ -394,6 +394,13 @@ def test_is_preview_disabled(self):
)
assert data_node.is_preview_disabled() is True

def test_are_datasets_previewable(self, example_data_node):
DataNodeMetadata.set_are_datasets_previewable(False)
preview_node_metadata = DataNodeMetadata(data_node=example_data_node)

assert preview_node_metadata.preview is None
assert preview_node_metadata.preview_type is None

def test_preview_data_node_metadata(self, example_data_node):
expected_preview_data = {
"columns": ["id", "company_rating", "company_location"],
@@ -405,10 +405,10 @@ def test_preview_data_node_metadata(self, example_data_node):
],
}

DataNodeMetadata.set_is_datasets_previewed(True)
preview_node_metadata = DataNodeMetadata(data_node=example_data_node)

assert preview_node_metadata.preview == expected_preview_data
assert preview_node_metadata.preview_type == "TablePreview"

def test_preview_data_node_metadata_exception(self, caplog):
empty_dataset = CSVDataset(filepath="temp.csv")
