diff --git a/package/kedro_viz/api/rest/responses.py b/package/kedro_viz/api/rest/responses.py
index 46165a79c8..2c2ce33424 100644
--- a/package/kedro_viz/api/rest/responses.py
+++ b/package/kedro_viz/api/rest/responses.py
@@ -421,11 +421,11 @@ def save_api_main_response_to_fs(main_path: str, remote_fs: Any):
 
 
 def save_api_node_response_to_fs(
-    nodes_path: str, remote_fs: Any, is_datasets_previewed: bool
+    nodes_path: str, remote_fs: Any, are_datasets_previewable: bool
 ):
     """Saves API /nodes/{node} response to a directory."""
     # Set if preview is enabled/disabled for all data nodes
-    DataNodeMetadata.set_is_datasets_previewed(is_datasets_previewed)
+    DataNodeMetadata.set_are_datasets_previewable(are_datasets_previewable)
 
     for nodeId in data_access_manager.nodes.get_node_ids():
         try:
@@ -457,7 +457,7 @@ def save_api_pipeline_response_to_fs(pipelines_path: str, remote_fs: Any):
             raise exc
 
 
-def save_api_responses_to_fs(path: str, remote_fs: Any, is_datasets_previewed: bool):
+def save_api_responses_to_fs(path: str, remote_fs: Any, are_datasets_previewable: bool):
     """Saves all Kedro Viz API responses to a directory."""
     try:
         logger.debug(
@@ -475,7 +475,7 @@ def save_api_responses_to_fs(path: str, remote_fs: Any, is_datasets_previewed: b
         remote_fs.makedirs(pipelines_path, exist_ok=True)
 
         save_api_main_response_to_fs(main_path, remote_fs)
-        save_api_node_response_to_fs(nodes_path, remote_fs, is_datasets_previewed)
+        save_api_node_response_to_fs(nodes_path, remote_fs, are_datasets_previewable)
         save_api_pipeline_response_to_fs(pipelines_path, remote_fs)
 
     except Exception as exc:  # pragma: no cover
diff --git a/package/kedro_viz/integrations/deployment/base_deployer.py b/package/kedro_viz/integrations/deployment/base_deployer.py
index 555d25c4f2..73e23d815a 100644
--- a/package/kedro_viz/integrations/deployment/base_deployer.py
+++ b/package/kedro_viz/integrations/deployment/base_deployer.py
@@ -33,9 +33,9 @@ def __init__(self):
         self._path = None
         self._fs = None
 
-    def _upload_api_responses(self, is_datasets_previewed: bool):
+    def _upload_api_responses(self, are_datasets_previewable: bool):
         """Write API responses to the build."""
-        save_api_responses_to_fs(self._path, self._fs, is_datasets_previewed)
+        save_api_responses_to_fs(self._path, self._fs, are_datasets_previewable)
 
     def _ingest_heap_analytics(self):
         """Ingest heap analytics to index file in the build."""
@@ -98,9 +98,9 @@ def _upload_deploy_viz_metadata_file(self):
             logger.exception("Upload failed: %s ", exc)
             raise exc
 
-    def deploy(self, is_datasets_previewed: bool = False):
+    def deploy(self, are_datasets_previewable: bool = False):
         """Create and deploy all static files to local/remote file system"""
-        self._upload_api_responses(is_datasets_previewed)
+        self._upload_api_responses(are_datasets_previewable)
         self._upload_static_files(_HTML_DIR)
         self._upload_deploy_viz_metadata_file()
 
diff --git a/package/kedro_viz/launchers/cli.py b/package/kedro_viz/launchers/cli.py
index 37491cfce7..c1ef7b9309 100644
--- a/package/kedro_viz/launchers/cli.py
+++ b/package/kedro_viz/launchers/cli.py
@@ -285,7 +285,7 @@ def build(include_hooks, preview):
 
 def create_shareableviz_process(
     platform,
-    is_datasets_previewed,
+    are_datasets_previewable,
     endpoint=None,
     bucket_name=None,
     include_hooks=False,
@@ -299,7 +299,7 @@ def create_shareableviz_process(
             target=load_and_deploy_viz,
             args=(
                 platform,
-                is_datasets_previewed,
+                are_datasets_previewable,
                 endpoint,
                 bucket_name,
                 include_hooks,
@@ -375,7 +375,7 @@ def create_shareableviz_process(
 
 def load_and_deploy_viz(
     platform,
-    is_datasets_previewed,
+    are_datasets_previewable,
     endpoint,
     bucket_name,
     include_hooks,
@@ -391,7 +391,7 @@ def load_and_deploy_viz(
 
         # Start the deployment
         deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
-        deployer.deploy(is_datasets_previewed)
+        deployer.deploy(are_datasets_previewable)
 
     except (
         # pylint: disable=catching-non-exception
diff --git a/package/kedro_viz/models/flowchart.py b/package/kedro_viz/models/flowchart.py
index ff891bc779..d3d5526dac 100644
--- a/package/kedro_viz/models/flowchart.py
+++ b/package/kedro_viz/models/flowchart.py
@@ -735,7 +735,7 @@ class DataNodeMetadata(GraphNodeMetadata):
 
     data_node: DataNode = Field(..., exclude=True)
 
-    is_datasets_previewed: ClassVar[bool] = True
+    are_datasets_previewable: ClassVar[bool] = True
     type: Optional[str] = Field(
         default=None, validate_default=True, description="The type of the data node"
     )
@@ -779,8 +779,8 @@ def check_data_node_exists(cls, values):
         return values
 
     @classmethod
-    def set_is_datasets_previewed(cls, value: bool):
-        cls.is_datasets_previewed = value
+    def set_are_datasets_previewable(cls, value: bool):
+        cls.are_datasets_previewable = value
 
     @classmethod
     def set_data_node_and_dataset(cls, data_node):
@@ -815,7 +815,7 @@ def set_preview(cls, _):
         if (
             cls.data_node.is_preview_disabled()
             or not hasattr(cls.dataset, "preview")
-            or not cls.is_datasets_previewed
+            or not cls.are_datasets_previewable
         ):
             return None
 
@@ -839,7 +839,11 @@ def set_preview(cls, _):
     @field_validator("preview_type")
     @classmethod
     def set_preview_type(cls, _):
-        if cls.data_node.is_preview_disabled() or not hasattr(cls.dataset, "preview"):
+        if (
+            cls.data_node.is_preview_disabled()
+            or not hasattr(cls.dataset, "preview")
+            or not cls.are_datasets_previewable
+        ):
             return None
 
         try:
diff --git a/package/tests/conftest.py b/package/tests/conftest.py
index cc77364d74..b280938354 100644
--- a/package/tests/conftest.py
+++ b/package/tests/conftest.py
@@ -17,7 +17,7 @@
 from kedro_viz.data_access import DataAccessManager
 from kedro_viz.integrations.kedro.hooks import DatasetStatsHook
 from kedro_viz.integrations.kedro.sqlite_store import SQLiteStore
-from kedro_viz.models.flowchart import GraphNode
+from kedro_viz.models.flowchart import DataNodeMetadata, GraphNode
 from kedro_viz.server import populate_data
 
 
@@ -350,3 +350,8 @@ def pipeline_with_data_sets_mock():
     pipeline = mock.MagicMock()
     pipeline.data_sets.return_value = ["model_inputs#csv"]
     return pipeline
+
+
+@pytest.fixture(autouse=True)
+def reset_are_datasets_previewable():
+    DataNodeMetadata.are_datasets_previewable = True
diff --git a/package/tests/test_api/test_rest/test_responses.py b/package/tests/test_api/test_rest/test_responses.py
index 7464efefd9..d0c393ad2b 100644
--- a/package/tests/test_api/test_rest/test_responses.py
+++ b/package/tests/test_api/test_rest/test_responses.py
@@ -1031,7 +1031,7 @@ def test_save_api_pipeline_response_to_fs(self, mocker):
         mock_write_api_response_to_fs.assert_has_calls(expected_calls, any_order=True)
 
     @pytest.mark.parametrize(
-        "file_path, protocol, is_datasets_previewed",
+        "file_path, protocol, are_datasets_previewable",
         [
             ("s3://shareableviz", "s3", True),
             ("abfs://shareableviz", "abfs", False),
@@ -1039,7 +1039,7 @@
         ],
     )
     def test_save_api_responses_to_fs(
-        self, file_path, protocol, is_datasets_previewed, mocker
+        self, file_path, protocol, are_datasets_previewable, mocker
     ):
         mock_api_main_response_to_fs = mocker.patch(
"kedro_viz.api.rest.responses.save_api_main_response_to_fs" @@ -1055,7 +1055,7 @@ def test_save_api_responses_to_fs( mock_filesystem.return_value.protocol = protocol save_api_responses_to_fs( - file_path, mock_filesystem.return_value, is_datasets_previewed + file_path, mock_filesystem.return_value, are_datasets_previewable ) mock_api_main_response_to_fs.assert_called_once_with( @@ -1064,7 +1064,7 @@ def test_save_api_responses_to_fs( mock_api_node_response_to_fs.assert_called_once_with( f"{file_path}/api/nodes", mock_filesystem.return_value, - is_datasets_previewed, + are_datasets_previewable, ) mock_api_pipeline_response_to_fs.assert_called_once_with( f"{file_path}/api/pipelines", mock_filesystem.return_value diff --git a/package/tests/test_integrations/test_azure_deployer.py b/package/tests/test_integrations/test_azure_deployer.py index 4c524811b4..4b1a8d7a30 100644 --- a/package/tests/test_integrations/test_azure_deployer.py +++ b/package/tests/test_integrations/test_azure_deployer.py @@ -26,7 +26,7 @@ def mock_file_system(mocker): class TestAzureDeployer: - def test_deploy(self, endpoint, bucket_name, mocker): + def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system): deployer = AzureDeployer(endpoint, bucket_name) mocker.patch.object(deployer, "_upload_api_responses") diff --git a/package/tests/test_integrations/test_deployer_factory.py b/package/tests/test_integrations/test_deployer_factory.py index c14794b087..638fe94065 100644 --- a/package/tests/test_integrations/test_deployer_factory.py +++ b/package/tests/test_integrations/test_deployer_factory.py @@ -18,7 +18,8 @@ ("gcp", "http://mocked-url.com", "shareableviz", GCPDeployer), ], ) -def test_create_deployer(platform, endpoint, bucket_name, deployer_class): +def test_create_deployer(platform, endpoint, bucket_name, deployer_class, mocker): + mocker.patch("fsspec.filesystem") deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name) assert isinstance(deployer, deployer_class) assert deployer._endpoint == endpoint diff --git a/package/tests/test_integrations/test_gcp_deployer.py b/package/tests/test_integrations/test_gcp_deployer.py index 56daac7747..86f4f2094e 100755 --- a/package/tests/test_integrations/test_gcp_deployer.py +++ b/package/tests/test_integrations/test_gcp_deployer.py @@ -21,7 +21,7 @@ def mock_file_system(mocker): class TestGCPDeployer: - def test_deploy(self, endpoint, bucket_name, mocker): + def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system): deployer = GCPDeployer(endpoint, bucket_name) mocker.patch.object(deployer, "_upload_api_responses") diff --git a/package/tests/test_launchers/test_cli.py b/package/tests/test_launchers/test_cli.py index 7ff37d9a5a..cc3b2cf780 100755 --- a/package/tests/test_launchers/test_cli.py +++ b/package/tests/test_launchers/test_cli.py @@ -588,7 +588,7 @@ def test_successful_build_with_existing_static_files( @pytest.mark.parametrize( - "platform, is_datasets_previewed, endpoint, bucket_name," + "platform, are_datasets_previewable, endpoint, bucket_name," "include_hooks, process_completed_value", [ ( @@ -645,7 +645,7 @@ def test_successful_build_with_existing_static_files( ) def test_create_shareableviz_process( platform, - is_datasets_previewed, + are_datasets_previewable, endpoint, bucket_name, include_hooks, @@ -659,7 +659,7 @@ def test_create_shareableviz_process( ): mock_process_completed.return_value.value = process_completed_value cli.create_shareableviz_process( - platform, is_datasets_previewed, endpoint, bucket_name, 
+        platform, are_datasets_previewable, endpoint, bucket_name, include_hooks
     )
 
     # Assert the mocks were called as expected
@@ -667,7 +667,7 @@
         target=mock_viz_load_and_deploy,
         args=(
             platform,
-            is_datasets_previewed,
+            are_datasets_previewable,
             endpoint,
             bucket_name,
             include_hooks,
@@ -707,7 +707,7 @@
 
 
 @pytest.mark.parametrize(
-    "platform, is_datasets_previewed, endpoint, bucket_name, include_hooks, package_name",
+    "platform, are_datasets_previewable, endpoint, bucket_name, include_hooks, package_name",
     [
         (
             "azure",
@@ -731,7 +731,7 @@
 )
 def test_load_and_deploy_viz_success(
     platform,
-    is_datasets_previewed,
+    are_datasets_previewable,
     endpoint,
     bucket_name,
     include_hooks,
@@ -747,7 +747,7 @@
 
     cli.load_and_deploy_viz(
         platform,
-        is_datasets_previewed,
+        are_datasets_previewable,
         endpoint,
         bucket_name,
         include_hooks,
@@ -762,5 +762,5 @@
     mock_DeployerFactory.create_deployer.assert_called_once_with(
         platform, endpoint, bucket_name
     )
-    deployer_mock.deploy.assert_called_once_with(is_datasets_previewed)
+    deployer_mock.deploy.assert_called_once_with(are_datasets_previewable)
     mock_click_echo.echo.assert_not_called()
diff --git a/package/tests/test_models/test_flowchart.py b/package/tests/test_models/test_flowchart.py
index a81e4c45c1..f578d0b140 100644
--- a/package/tests/test_models/test_flowchart.py
+++ b/package/tests/test_models/test_flowchart.py
@@ -394,6 +394,13 @@ def test_is_preview_disabled(self):
         )
         assert data_node.is_preview_disabled() is True
 
+    def test_are_datasets_previewable(self, example_data_node):
+        DataNodeMetadata.set_are_datasets_previewable(False)
+        preview_node_metadata = DataNodeMetadata(data_node=example_data_node)
+
+        assert preview_node_metadata.preview is None
+        assert preview_node_metadata.preview_type is None
+
     def test_preview_data_node_metadata(self, example_data_node):
         expected_preview_data = {
             "columns": ["id", "company_rating", "company_location"],
@@ -405,10 +412,10 @@ def test_preview_data_node_metadata(self, example_data_node):
             ],
         }
 
-        DataNodeMetadata.set_is_datasets_previewed(True)
         preview_node_metadata = DataNodeMetadata(data_node=example_data_node)
 
         assert preview_node_metadata.preview == expected_preview_data
+        assert preview_node_metadata.preview_type == "TablePreview"
 
     def test_preview_data_node_metadata_exception(self, caplog):
         empty_dataset = CSVDataset(filepath="temp.csv")
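
The renamed flag is class-level shared state on DataNodeMetadata: save_api_node_response_to_fs sets it once before serialising node metadata, and with this patch both the preview and preview_type validators short-circuit to None while it is False, which is also why conftest.py gains an autouse fixture that resets it to True. Below is a minimal sketch of that behaviour, assuming the patch is applied; example_data_node is a hypothetical stand-in for the project's existing test fixture, and everything else uses names confirmed by the diff above.

    import pytest
    from kedro_viz.models.flowchart import DataNodeMetadata


    @pytest.fixture(autouse=True)
    def reset_are_datasets_previewable():
        # Mirrors the new conftest.py fixture: the flag is shared class state,
        # so it is reset around every test to avoid leakage between tests.
        DataNodeMetadata.are_datasets_previewable = True


    def test_previews_disabled(example_data_node):  # example_data_node: assumed fixture
        # save_api_node_response_to_fs() flips the same switch before serialising
        # node metadata during `kedro viz build` / `kedro viz deploy`.
        DataNodeMetadata.set_are_datasets_previewable(False)

        metadata = DataNodeMetadata(data_node=example_data_node)

        # With previews disabled, both validators now return None.
        assert metadata.preview is None
        assert metadata.preview_type is None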