Commit c873015

Merge branch 'main' into docs/disable-preview

jitu5 authored Jun 4, 2024
2 parents ee44d28 + 8fe5fa4

Showing 15 changed files with 160 additions and 48 deletions.
2 changes: 2 additions & 0 deletions RELEASE.md
@@ -13,6 +13,8 @@ Please follow the established format:
- Introduce the toggle to expand and collapse all pipelines button in the utility bar. (#1858)
- Allow Kedro-Viz commands to run from any sub directory within Kedro project. (#1871)

+- Enable/disable preview for all the datasets when publishing Kedro-Viz from CLI. (#1894)
+
## Bug fixes and other changes
- Fix broken URL when active pipeline name changes on initial load. (#1914)
- Fix bug related to tag filtering and sharing with stateful URL. (#1878)
11 changes: 8 additions & 3 deletions package/kedro_viz/api/rest/responses.py
@@ -420,8 +420,13 @@ def save_api_main_response_to_fs(main_path: str, remote_fs: Any):
raise exc


-def save_api_node_response_to_fs(nodes_path: str, remote_fs: Any):
+def save_api_node_response_to_fs(
+    nodes_path: str, remote_fs: Any, is_all_previews_enabled: bool
+):
    """Saves API /nodes/{node} response to a directory."""
+    # Set if preview is enabled/disabled for all data nodes
+    DataNodeMetadata.set_is_all_previews_enabled(is_all_previews_enabled)
+
for nodeId in data_access_manager.nodes.get_node_ids():
try:
write_api_response_to_fs(
@@ -452,7 +457,7 @@ def save_api_pipeline_response_to_fs(pipelines_path: str, remote_fs: Any):
raise exc


-def save_api_responses_to_fs(path: str, remote_fs: Any):
+def save_api_responses_to_fs(path: str, remote_fs: Any, is_all_previews_enabled: bool):
"""Saves all Kedro Viz API responses to a directory."""
try:
logger.debug(
@@ -470,7 +475,7 @@ def save_api_responses_to_fs(path: str, remote_fs: Any):
remote_fs.makedirs(pipelines_path, exist_ok=True)

save_api_main_response_to_fs(main_path, remote_fs)
-        save_api_node_response_to_fs(nodes_path, remote_fs)
+        save_api_node_response_to_fs(nodes_path, remote_fs, is_all_previews_enabled)
save_api_pipeline_response_to_fs(pipelines_path, remote_fs)

except Exception as exc: # pragma: no cover
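Taken together, the responses module now threads the preview toggle from the top-level writer down to the per-node writer. A minimal sketch of how a caller might drive it, assuming a Kedro project has already been loaded so that data_access_manager is populated (mirroring what `kedro viz build` does):

import fsspec

from kedro_viz.api.rest.responses import save_api_responses_to_fs

# Write all API responses into a local "build" directory with previews
# disabled for every dataset; the boolean is forwarded to
# save_api_node_response_to_fs, which sets the class-level flag on
# DataNodeMetadata before node responses are serialised.
save_api_responses_to_fs("build", fsspec.filesystem("file"), False)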
8 changes: 4 additions & 4 deletions package/kedro_viz/integrations/deployment/base_deployer.py
@@ -33,9 +33,9 @@ def __init__(self):
self._path = None
self._fs = None

-    def _upload_api_responses(self):
+    def _upload_api_responses(self, is_all_previews_enabled: bool):
        """Write API responses to the build."""
-        save_api_responses_to_fs(self._path, self._fs)
+        save_api_responses_to_fs(self._path, self._fs, is_all_previews_enabled)

def _ingest_heap_analytics(self):
"""Ingest heap analytics to index file in the build."""
@@ -98,9 +98,9 @@ def _upload_deploy_viz_metadata_file(self):
logger.exception("Upload failed: %s ", exc)
raise exc

-    def deploy(self):
+    def deploy(self, is_all_previews_enabled: bool = False):
        """Create and deploy all static files to local/remote file system"""

-        self._upload_api_responses()
+        self._upload_api_responses(is_all_previews_enabled)
self._upload_static_files(_HTML_DIR)
self._upload_deploy_viz_metadata_file()
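The flag defaults to False, so existing deployer callers keep previews off unless they opt in. A short usage sketch, assuming the factory's import path and using placeholder platform, endpoint, and bucket values that are not part of this commit:

from kedro_viz.integrations.deployment.deployer_factory import DeployerFactory

# Placeholder endpoint/bucket for illustration only.
deployer = DeployerFactory.create_deployer(
    "aws", "http://my-bucket.s3-website.us-east-1.amazonaws.com", "my-bucket"
)

# Opt in to dataset previews in the published build; omitting the
# argument (or passing False) publishes without previews.
deployer.deploy(is_all_previews_enabled=True)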
34 changes: 28 additions & 6 deletions package/kedro_viz/launchers/cli.py
@@ -234,7 +234,12 @@ def run(
is_flag=True,
help="A flag to include all registered hooks in your Kedro Project",
)
-def deploy(platform, endpoint, bucket_name, include_hooks):
+@click.option(
+    "--include-preview",
+    is_flag=True,
+    help="Enable/disable preview for all the datasets.",
+)
+def deploy(platform, endpoint, bucket_name, include_hooks, include_preview):
"""Deploy and host Kedro Viz on provided platform"""
if not platform or platform.lower() not in SHAREABLEVIZ_SUPPORTED_PLATFORMS:
display_cli_message(
@@ -252,7 +257,13 @@ def deploy(platform, endpoint, bucket_name, include_hooks):
)
return

-    create_shareableviz_process(platform, endpoint, bucket_name, include_hooks)
+    create_shareableviz_process(
+        platform,
+        include_preview,
+        endpoint,
+        bucket_name,
+        include_hooks,
+    )


@viz.command(context_settings={"help_option_names": ["-h", "--help"]})
@@ -261,14 +272,23 @@ def deploy(platform, endpoint, bucket_name, include_hooks):
is_flag=True,
help="A flag to include all registered hooks in your Kedro Project",
)
-def build(include_hooks):
+@click.option(
+    "--include-preview",
+    is_flag=True,
+    help="Enable/disable preview for all the datasets.",
+)
+def build(include_hooks, include_preview):
"""Create build directory of local Kedro Viz instance with Kedro project data"""

create_shareableviz_process("local", include_hooks=include_hooks)
create_shareableviz_process("local", include_preview, include_hooks=include_hooks)


def create_shareableviz_process(
-    platform, endpoint=None, bucket_name=None, include_hooks=False
+    platform,
+    is_all_previews_enabled,
+    endpoint=None,
+    bucket_name=None,
+    include_hooks=False,
):
"""Creates platform specific deployer process"""
try:
@@ -279,6 +299,7 @@ def create_shareableviz_process(
            target=load_and_deploy_viz,
            args=(
                platform,
+                is_all_previews_enabled,
                endpoint,
                bucket_name,
                include_hooks,
@@ -354,6 +375,7 @@ def create_shareableviz_process(

def load_and_deploy_viz(
    platform,
+    is_all_previews_enabled,
    endpoint,
    bucket_name,
    include_hooks,
@@ -369,7 +391,7 @@ def load_and_deploy_viz(

        # Start the deployment
        deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
-        deployer.deploy()
+        deployer.deploy(is_all_previews_enabled)

except (
# pylint: disable=catching-non-exception
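With both commands wired up, the feature is driven from the command line. Assuming the existing `--platform`, `--endpoint`, and `--bucket-name` options of `kedro viz deploy`, publishing with previews looks like `kedro viz deploy --platform=aws --endpoint=<url> --bucket-name=<bucket> --include-preview`, while `kedro viz build --include-preview` does the same for a local build directory. Omitting the flag keeps previews disabled, matching the `is_all_previews_enabled=False` default on `deploy()`.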
31 changes: 25 additions & 6 deletions package/kedro_viz/models/flowchart.py
@@ -8,7 +8,7 @@
from enum import Enum
from pathlib import Path
from types import FunctionType
-from typing import Any, Dict, List, Optional, Set, Union, cast
+from typing import Any, ClassVar, Dict, List, Optional, Set, Union, cast

from kedro.pipeline.node import Node as KedroNode
from pydantic import (
@@ -638,10 +638,10 @@ def get_preview_args(self):
"""Gets the preview arguments for a dataset"""
return self.viz_metadata.get("preview_args", None)

-    def is_preview_disabled(self):
-        """Checks if the dataset has a preview disabled"""
+    def is_preview_enabled(self):
+        """Checks if the dataset has a preview enabled at the node level."""
        return (
-            self.viz_metadata is not None and self.viz_metadata.get("preview") is False
+            self.viz_metadata is None or self.viz_metadata.get("preview") is not False
        )


@@ -722,12 +722,19 @@ class DataNodeMetadata(GraphNodeMetadata):
    Args:
        data_node (DataNode): Data node to which this metadata belongs to.
+
+    Attributes:
+        is_all_previews_enabled (bool): Class-level attribute to determine if
+            previews are enabled for all nodes. This can be configured via CLI
+            or UI to manage the preview settings.
+
    Raises:
        AssertionError: If data_node is not supplied during instantiation
    """

    data_node: DataNode = Field(..., exclude=True)

+    is_all_previews_enabled: ClassVar[bool] = True
+
    type: Optional[str] = Field(
        default=None, validate_default=True, description="The type of the data node"
    )
@@ -769,6 +776,10 @@ def check_data_node_exists(cls, values):
cls.set_data_node_and_dataset(values["data_node"])
return values

@classmethod
def set_is_all_previews_enabled(cls, value: bool):
cls.is_all_previews_enabled = value

@classmethod
def set_data_node_and_dataset(cls, data_node):
cls.data_node = data_node
@@ -799,7 +810,11 @@ def set_run_command(cls, _):
@field_validator("preview")
@classmethod
def set_preview(cls, _):
if cls.data_node.is_preview_disabled() or not hasattr(cls.dataset, "preview"):
if (
not cls.data_node.is_preview_enabled()
or not hasattr(cls.dataset, "preview")
or not cls.is_all_previews_enabled
):
return None

try:
@@ -822,7 +837,11 @@ def set_preview(cls, _):
@field_validator("preview_type")
@classmethod
def set_preview_type(cls, _):
if cls.data_node.is_preview_disabled() or not hasattr(cls.dataset, "preview"):
if (
not cls.data_node.is_preview_enabled()
or not hasattr(cls.dataset, "preview")
or not cls.is_all_previews_enabled
):
return None

try:
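After this change a preview is rendered only when three gates all pass: the class-level toggle, the per-dataset catalog metadata (still controlled by the existing `viz_metadata` convention, i.e. `metadata.kedro-viz.preview: false` in the catalog), and the dataset type actually implementing `preview()`. A condensed sketch of the combined condition in the two validators, with `node` and `dataset` standing in for `cls.data_node` and `cls.dataset`:

def should_render_preview(node, dataset, is_all_previews_enabled: bool) -> bool:
    """Mirror of the guard in set_preview/set_preview_type: every gate
    must pass, otherwise the validators return None (no preview)."""
    return (
        is_all_previews_enabled          # global toggle set via CLI/UI
        and node.is_preview_enabled()    # catalog metadata not "preview: False"
        and hasattr(dataset, "preview")  # dataset class implements preview()
    )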
5 changes: 3 additions & 2 deletions package/kedro_viz/server.py
@@ -125,9 +125,10 @@ def run_server(
            pipeline_name,
            extra_params,
        )

+        # [TODO: As we can do this with `kedro viz build`,
+        # we need to shift this feature outside of kedro viz run]
        if save_file:
-            save_api_responses_to_fs(save_file, fsspec.filesystem("file"))
+            save_api_responses_to_fs(save_file, fsspec.filesystem("file"), True)

app = apps.create_api_app_from_project(path, autoreload)
else:
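Note that `kedro viz run --save-file` passes a hard-coded `True` here, so responses saved from a live server always include previews; the new CLI toggle applies only to `deploy` and `build`, which is presumably what the TODO about moving this feature out of `kedro viz run` refers to.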
7 changes: 6 additions & 1 deletion package/tests/conftest.py
@@ -17,7 +17,7 @@
from kedro_viz.data_access import DataAccessManager
from kedro_viz.integrations.kedro.hooks import DatasetStatsHook
from kedro_viz.integrations.kedro.sqlite_store import SQLiteStore
-from kedro_viz.models.flowchart import GraphNode
+from kedro_viz.models.flowchart import DataNodeMetadata, GraphNode
from kedro_viz.server import populate_data


@@ -350,3 +350,8 @@ def pipeline_with_data_sets_mock():
    pipeline = mock.MagicMock()
    pipeline.data_sets.return_value = ["model_inputs#csv"]
    return pipeline
+
+
+@pytest.fixture(autouse=True)
+def reset_is_all_previews_enabled():
+    DataNodeMetadata.is_all_previews_enabled = True
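Because `is_all_previews_enabled` is a ClassVar, a value set by one test would leak into the next; the autouse fixture restores the default `True` before every test in the suite.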
22 changes: 14 additions & 8 deletions package/tests/test_api/test_rest/test_responses.py
@@ -979,7 +979,7 @@ def test_save_api_node_response_to_fs(self, mocker):
)
remote_fs = Mock()

-        save_api_node_response_to_fs(nodes_path, remote_fs)
+        save_api_node_response_to_fs(nodes_path, remote_fs, False)

assert mock_write_api_response_to_fs.call_count == len(nodeIds)
assert mock_get_node_metadata_response.call_count == len(nodeIds)
@@ -1031,14 +1031,16 @@ def test_save_api_pipeline_response_to_fs(self, mocker):
mock_write_api_response_to_fs.assert_has_calls(expected_calls, any_order=True)

    @pytest.mark.parametrize(
-        "file_path, protocol",
+        "file_path, protocol, is_all_previews_enabled",
        [
-            ("s3://shareableviz", "s3"),
-            ("abfs://shareableviz", "abfs"),
-            ("shareableviz", "file"),
+            ("s3://shareableviz", "s3", True),
+            ("abfs://shareableviz", "abfs", False),
+            ("shareableviz", "file", True),
        ],
    )
-    def test_save_api_responses_to_fs(self, file_path, protocol, mocker):
+    def test_save_api_responses_to_fs(
+        self, file_path, protocol, is_all_previews_enabled, mocker
+    ):
mock_api_main_response_to_fs = mocker.patch(
"kedro_viz.api.rest.responses.save_api_main_response_to_fs"
)
@@ -1052,13 +1054,17 @@ def test_save_api_responses_to_fs(self, file_path, protocol, mocker):
mock_filesystem = mocker.patch("fsspec.filesystem")
mock_filesystem.return_value.protocol = protocol

-        save_api_responses_to_fs(file_path, mock_filesystem.return_value)
+        save_api_responses_to_fs(
+            file_path, mock_filesystem.return_value, is_all_previews_enabled
+        )

        mock_api_main_response_to_fs.assert_called_once_with(
            f"{file_path}/api/main", mock_filesystem.return_value
        )
        mock_api_node_response_to_fs.assert_called_once_with(
-            f"{file_path}/api/nodes", mock_filesystem.return_value
+            f"{file_path}/api/nodes",
+            mock_filesystem.return_value,
+            is_all_previews_enabled,
        )
mock_api_pipeline_response_to_fs.assert_called_once_with(
f"{file_path}/api/pipelines", mock_filesystem.return_value
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_azure_deployer.py
@@ -26,7 +26,7 @@ def mock_file_system(mocker):


class TestAzureDeployer:
-    def test_deploy(self, endpoint, bucket_name, mocker):
+    def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system):
deployer = AzureDeployer(endpoint, bucket_name)

mocker.patch.object(deployer, "_upload_api_responses")
6 changes: 4 additions & 2 deletions package/tests/test_integrations/test_base_deployer.py
@@ -14,9 +14,11 @@ def test_upload_api_responses(self, mocker):
"kedro_viz.integrations.deployment.base_deployer.save_api_responses_to_fs"
)
        build = ConcreteBaseDeployer()
-        build._upload_api_responses()
+        build._upload_api_responses(False)

-        save_api_responses_to_fs_mock.assert_called_once_with(build._path, build._fs)
+        save_api_responses_to_fs_mock.assert_called_once_with(
+            build._path, build._fs, False
+        )

def test_upload_static_files(self, mocker):
mocker.patch("fsspec.filesystem")
3 changes: 2 additions & 1 deletion package/tests/test_integrations/test_deployer_factory.py
@@ -18,7 +18,8 @@
("gcp", "http://mocked-url.com", "shareableviz", GCPDeployer),
],
)
-def test_create_deployer(platform, endpoint, bucket_name, deployer_class):
+def test_create_deployer(platform, endpoint, bucket_name, deployer_class, mocker):
+    mocker.patch("fsspec.filesystem")
deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
assert isinstance(deployer, deployer_class)
assert deployer._endpoint == endpoint
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_gcp_deployer.py
@@ -21,7 +21,7 @@ def mock_file_system(mocker):


class TestGCPDeployer:
-    def test_deploy(self, endpoint, bucket_name, mocker):
+    def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system):
deployer = GCPDeployer(endpoint, bucket_name)

mocker.patch.object(deployer, "_upload_api_responses")
(3 more changed files not shown)