refactor disable preview cli
Signed-off-by: ravi-kumar-pilla <[email protected]>
ravi-kumar-pilla committed May 15, 2024
1 parent 85e1a2d commit d8ef988
Showing 6 changed files with 34 additions and 50 deletions.
17 changes: 14 additions & 3 deletions package/kedro_viz/api/rest/responses.py
@@ -419,8 +419,15 @@ def save_api_main_response_to_fs(main_path: str, remote_fs: Any):
         raise exc
 
 
-def save_api_node_response_to_fs(nodes_path: str, remote_fs: Any):
+def save_api_node_response_to_fs(
+    nodes_path: str, remote_fs: Any, is_preview_enabled_for_all_datasets: bool
+):
     """Saves API /nodes/{node} response to a directory."""
+    # Set if preview is enabled/disabled for all data nodes
+    DataNodeMetadata.set_is_preview_enabled_for_all_datasets(
+        is_preview_enabled_for_all_datasets
+    )
+
     for nodeId in data_access_manager.nodes.get_node_ids():
         try:
             write_api_response_to_fs(
@@ -451,7 +458,9 @@ def save_api_pipeline_response_to_fs(pipelines_path: str, remote_fs: Any):
             raise exc
 
 
-def save_api_responses_to_fs(path: str, remote_fs: Any):
+def save_api_responses_to_fs(
+    path: str, remote_fs: Any, is_preview_enabled_for_all_datasets: bool
+):
     """Saves all Kedro Viz API responses to a directory."""
     try:
         logger.debug(
@@ -469,7 +478,9 @@ def save_api_responses_to_fs(path: str, remote_fs: Any):
             remote_fs.makedirs(pipelines_path, exist_ok=True)
 
         save_api_main_response_to_fs(main_path, remote_fs)
-        save_api_node_response_to_fs(nodes_path, remote_fs)
+        save_api_node_response_to_fs(
+            nodes_path, remote_fs, is_preview_enabled_for_all_datasets
+        )
         save_api_pipeline_response_to_fs(pipelines_path, remote_fs)
 
     except Exception as exc:  # pragma: no cover
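For orientation, the new third argument simply pushes one boolean into DataNodeMetadata before the per-node responses are written. A minimal usage sketch, assuming the data access manager has already been populated (for example via load_and_populate_data); the build path and the local fsspec filesystem are illustrative values, not taken from this commit:

from fsspec.implementations.local import LocalFileSystem

from kedro_viz.api.rest.responses import save_api_responses_to_fs

# Hypothetical static export: writes the /main, /nodes/* and /pipelines/*
# responses under <path>/api, with previews switched off for every dataset.
fs = LocalFileSystem()
save_api_responses_to_fs("/tmp/kedro-viz-build", fs, False)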
12 changes: 0 additions & 12 deletions package/kedro_viz/data_access/managers.py
@@ -64,7 +64,6 @@ def __init__(self):
         self.runs = RunsRepository()
         self.tracking_datasets = TrackingDatasetsRepository()
         self.dataset_stats = {}
-        self.is_preview_enabled_for_all_nodes = False
 
     def set_db_session(self, db_session_class: sessionmaker):
         """Set db session on repositories that need it."""
@@ -128,16 +127,6 @@ def add_dataset_stats(self, stats_dict: Dict):
 
         self.dataset_stats = stats_dict
 
-    def add_is_preview_enabled_for_all_nodes(
-        self, is_preview_enabled_for_all_nodes: bool
-    ):
-        """Add the preview flag to the data access manager.
-        Args:
-            is_preview_enabled_for_all_nodes: A boolean flag to indicate whether the preview is enabled for all the nodes.
-        """
-
-        self.is_preview_enabled_for_all_nodes = is_preview_enabled_for_all_nodes
-
     def get_stats_for_data_node(self, data_node_name: str) -> Union[Dict, None]:
         """Returns the dataset statistics for the data node if found
@@ -315,7 +304,6 @@ def add_dataset(
             dataset=obj,
             stats=self.get_stats_for_data_node(_strip_transcoding(dataset_name)),
             is_free_input=is_free_input,
-            is_preview_enabled_for_all_nodes=self.is_preview_enabled_for_all_nodes,
        )
         graph_node = self.nodes.add_node(graph_node)
         graph_node.add_pipeline(registered_pipeline_id)
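In other words, preview state no longer lives in the data-access layer at all. A toy check of that, assuming kedro-viz is importable; the assertions are illustrative and are not taken from the commit's tests:

from kedro_viz.data_access.managers import DataAccessManager

# After this commit the manager carries no preview state: the old attribute
# and its setter are simply gone.
manager = DataAccessManager()
assert not hasattr(manager, "is_preview_enabled_for_all_nodes")
assert not hasattr(manager, "add_is_preview_enabled_for_all_nodes")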
10 changes: 6 additions & 4 deletions package/kedro_viz/integrations/deployment/base_deployer.py
@@ -33,9 +33,11 @@ def __init__(self):
         self._path = None
         self._fs = None
 
-    def _upload_api_responses(self):
+    def _upload_api_responses(self, is_preview_enabled_for_all_datasets: bool):
         """Write API responses to the build."""
-        save_api_responses_to_fs(self._path, self._fs)
+        save_api_responses_to_fs(
+            self._path, self._fs, is_preview_enabled_for_all_datasets
+        )
 
     def _ingest_heap_analytics(self):
         """Ingest heap analytics to index file in the build."""
@@ -98,9 +100,9 @@ def _upload_deploy_viz_metadata_file(self):
             logger.exception("Upload failed: %s ", exc)
             raise exc
 
-    def deploy(self):
+    def deploy(self, is_preview_enabled_for_all_datasets: bool = False):
         """Create and deploy all static files to local/remote file system"""
 
-        self._upload_api_responses()
+        self._upload_api_responses(is_preview_enabled_for_all_datasets)
         self._upload_static_files(_HTML_DIR)
         self._upload_deploy_viz_metadata_file()
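Because the new parameter defaults to False, existing callers of deploy() keep working and simply get previews disabled. A sketch of opting in; the DeployerFactory import path is assumed from the repository layout, and the platform, endpoint and bucket values are placeholders:

from kedro_viz.integrations.deployment.deployer_factory import DeployerFactory

# Placeholder deployment target; create_deployer(platform, endpoint, bucket_name)
# mirrors the call made by the CLI in launchers/cli.py.
deployer = DeployerFactory.create_deployer(
    "aws", "http://example-bucket.s3-website.us-east-1.amazonaws.com", "example-bucket"
)
deployer.deploy(is_preview_enabled_for_all_datasets=True)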
10 changes: 1 addition & 9 deletions package/kedro_viz/launchers/cli.py
@@ -115,11 +115,6 @@ def viz(ctx):  # pylint: disable=unused-argument
     help=PARAMS_ARG_HELP,
     callback=_split_params,
 )
-@click.option(
-    "--preview",
-    default=True,
-    help="A flag to enable/disable a quick preview of node datasets.",
-)
 # pylint: disable=import-outside-toplevel, too-many-locals
 def run(
     host,
@@ -132,7 +127,6 @@ def run(
     autoreload,
     include_hooks,
     params,
-    preview,
 ):
     """Launch local Kedro Viz instance"""
     from kedro_viz.server import run_server
@@ -176,7 +170,6 @@ def run(
         "include_hooks": include_hooks,
         "package_name": PACKAGE_NAME,
         "extra_params": params,
-        "preview": preview,
     }
     if autoreload:
         run_process_kwargs = {
@@ -384,14 +377,13 @@ def load_and_deploy_viz(
     try:
         load_and_populate_data(
             Path.cwd(),
-            is_preview_enabled_for_all_nodes=preview,
             include_hooks=include_hooks,
             package_name=package_name,
         )
 
         # Start the deployment
         deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
-        deployer.deploy()
+        deployer.deploy(preview)
 
     except (
         # pylint: disable=catching-non-exception
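For `kedro viz run` the net effect is that the command no longer accepts --preview and forwards no preview entry to the server. A minimal programmatic sketch of the call it wraps, assuming it is executed inside a Kedro project; the host and port shown are just the usual defaults:

from kedro_viz.server import run_server

# After this commit run_server() has no `preview` keyword; passing one would
# raise a TypeError.
run_server(host="127.0.0.1", port=4141)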
26 changes: 13 additions & 13 deletions package/kedro_viz/models/flowchart.py
@@ -8,7 +8,7 @@
 from enum import Enum
 from pathlib import Path
 from types import FunctionType
-from typing import Any, Dict, List, Optional, Set, Union, cast
+from typing import Any, ClassVar, Dict, List, Optional, Set, Union, cast
 
 from kedro.pipeline.node import Node as KedroNode
 
@@ -227,7 +227,6 @@ def create_data_node(
         tags: Set[str],
         dataset: AbstractDataset,
         stats: Optional[Dict],
-        is_preview_enabled_for_all_nodes: bool,
         is_free_input: bool = False,
     ) -> Union["DataNode", "TranscodedDataNode"]:
         """Create a graph node of type data for a given Kedro Dataset instance.
@@ -240,7 +239,6 @@ def create_data_node(
             dataset: A dataset in a Kedro pipeline.
             stats: The dictionary of dataset statistics, e.g.
                 {"rows":2, "columns":3, "file_size":100}
-            is_preview_enabled_for_all_nodes: A flag to enable/disable a quick preview of node datasets.
             is_free_input: Whether the dataset is a free input in the pipeline
         Returns:
             An instance of DataNode.
@@ -265,7 +263,6 @@ def create_data_node(
             kedro_obj=dataset,
             is_free_input=is_free_input,
             stats=stats,
-            is_preview_enabled_for_all_nodes=is_preview_enabled_for_all_nodes,
         )
 
     @classmethod
@@ -566,7 +563,6 @@ class DataNode(GraphNode):
         layer (Optional[str]): The layer that this data node belongs to. Defaults to `None`.
         is_free_input (bool): Determines whether the data node is a free input. Defaults to `False`.
         stats (Optional[Dict]): Statistics for the data node. Defaults to `None`.
-        is_preview_enabled_for_all_nodes (bool): A flag to enable/disable a quick preview of node datasets. Defaults to `False`.
 
     Raises:
         AssertionError: If kedro_obj, name are not supplied during instantiation
@@ -580,10 +576,6 @@ class DataNode(GraphNode):
     )
     stats: Optional[Dict] = Field(None, description="The statistics for the data node.")
 
-    is_preview_enabled_for_all_nodes: bool = Field(
-        False, description="A flag to enable/disable a quick preview of node datasets."
-    )
-
     dataset_type: Optional[str] = Field(
         default=None,
         validate_default=True,
@@ -656,9 +648,7 @@ def get_preview_args(self):
     def is_preview_disabled(self):
         """Checks if the dataset has a preview disabled"""
         return (
-            self.is_preview_enabled_for_all_nodes is False
-            or self.viz_metadata is not None
-            and self.viz_metadata.get("preview") is False
+            self.viz_metadata is not None and self.viz_metadata.get("preview") is False
         )
 
 
@@ -745,6 +735,8 @@ class DataNodeMetadata(GraphNodeMetadata):
 
     data_node: DataNode = Field(..., exclude=True)
 
+    is_preview_enabled_for_all_datasets: ClassVar[bool] = False
+
     type: Optional[str] = Field(
         default=None, validate_default=True, description="The type of the data node"
     )
@@ -786,6 +778,10 @@ def check_data_node_exists(cls, values):
         cls.set_data_node_and_dataset(values["data_node"])
         return values
 
+    @classmethod
+    def set_is_preview_enabled_for_all_datasets(cls, value: bool):
+        cls.is_preview_enabled_for_all_datasets = value
+
     @classmethod
     def set_data_node_and_dataset(cls, data_node):
         cls.data_node = data_node
@@ -816,7 +812,11 @@ def set_run_command(cls, _):
     @field_validator("preview")
     @classmethod
     def set_preview(cls, _):
-        if cls.data_node.is_preview_disabled() or not hasattr(cls.dataset, "preview"):
+        if (
+            cls.data_node.is_preview_disabled()
+            or not hasattr(cls.dataset, "preview")
+            or not cls.is_preview_enabled_for_all_datasets
+        ):
             return None
 
         try:
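The flag now lives as a ClassVar on DataNodeMetadata instead of a per-instance field on DataNode, so a single class-level switch governs every dataset when metadata is built. A stripped-down sketch of the pattern, using a toy pydantic model rather than the real DataNodeMetadata:

from typing import ClassVar, Optional

from pydantic import BaseModel, field_validator


class ExampleNodeMetadata(BaseModel):
    """Toy stand-in for DataNodeMetadata, illustrating the class-level toggle."""

    # Shared by every instance; pydantic ignores ClassVar annotations as fields.
    is_preview_enabled_for_all_datasets: ClassVar[bool] = False

    preview: Optional[str] = None

    @classmethod
    def set_is_preview_enabled_for_all_datasets(cls, value: bool) -> None:
        cls.is_preview_enabled_for_all_datasets = value

    @field_validator("preview")
    @classmethod
    def set_preview(cls, value):
        # Mirror the real validator's short-circuit: no preview unless the
        # class-level flag has been switched on for this export.
        if not cls.is_preview_enabled_for_all_datasets:
            return None
        return value


ExampleNodeMetadata.set_is_preview_enabled_for_all_datasets(True)
print(ExampleNodeMetadata(preview="first five rows").preview)  # -> "first five rows"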
9 changes: 0 additions & 9 deletions package/kedro_viz/server.py
@@ -30,7 +30,6 @@ def populate_data(
     pipelines: Dict[str, Pipeline],
     session_store: BaseSessionStore,
     stats_dict: Dict,
-    is_preview_enabled_for_all_nodes: bool,
 ):  # pylint: disable=redefined-outer-name
     """Populate data repositories. Should be called once on application start
     if creating an api app from project.
@@ -41,10 +40,6 @@ def populate_data(
     session_class = make_db_session_factory(session_store.location)
     data_access_manager.set_db_session(session_class)
 
-    data_access_manager.add_is_preview_enabled_for_all_nodes(
-        is_preview_enabled_for_all_nodes
-    )
-
     data_access_manager.add_catalog(catalog, pipelines)
 
     # add dataset stats before adding pipelines as the data nodes
@@ -56,7 +51,6 @@ def populate_data(
 
 def load_and_populate_data(
     path: Path,
-    is_preview_enabled_for_all_nodes: bool,
     env: Optional[str] = None,
     include_hooks: bool = False,
     package_name: Optional[str] = None,
@@ -87,14 +81,12 @@ def load_and_populate_data(
         pipelines,
         session_store,
         stats_dict,
-        is_preview_enabled_for_all_nodes,
     )
 
 
 def run_server(
     host: str = DEFAULT_HOST,
     port: int = DEFAULT_PORT,
-    preview: bool = True,
     load_file: Optional[str] = None,
     save_file: Optional[str] = None,
     pipeline_name: Optional[str] = None,
@@ -133,7 +125,6 @@ def run_server(
     if load_file is None:
         load_and_populate_data(
             path,
-            preview,
             env,
             include_hooks,
             package_name,
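On the server side the preview flag has left the data-loading path entirely. A minimal sketch of the slimmed-down entry point, assuming it is executed inside a Kedro project; the arguments shown are just the remaining defaults:

from pathlib import Path

from kedro_viz.server import load_and_populate_data

# No preview argument anywhere in the loading path any more; the flag is applied
# later, when the API responses are serialised.
load_and_populate_data(Path.cwd(), env=None, include_hooks=False)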
