Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor Flowchart models from dataclass to pydantic base models #1565

Merged
merged 41 commits into from
Nov 14, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
a2636f3
create pydantic flowchart classes
ravi-kumar-pilla Oct 2, 2023
240a383
fixing pydantic class conversions
ravi-kumar-pilla Oct 5, 2023
772ed02
minor modifications and code comments
ravi-kumar-pilla Oct 7, 2023
900fb70
fix pytests for flowchart and managers
ravi-kumar-pilla Oct 10, 2023
57811a7
fix pytests
ravi-kumar-pilla Oct 10, 2023
182be16
fix lint errors and pytests
ravi-kumar-pilla Oct 11, 2023
62efd72
Merge branch 'main' of https://kedro-org/kedro-viz into ch…
ravi-kumar-pilla Oct 11, 2023
47cc4d7
Merge branch 'main' of https://kedro-org/kedro-viz into ch…
ravi-kumar-pilla Oct 11, 2023
d82f71d
fix lint issues and merge main
ravi-kumar-pilla Oct 11, 2023
fe67d26
fix initialization issues and lint issues
ravi-kumar-pilla Oct 11, 2023
47d3211
fix push issue
ravi-kumar-pilla Oct 11, 2023
1696c81
Merge branch 'main' into chore/refactor-data-classes
ravi-kumar-pilla Oct 12, 2023
c30e102
revert class method to static method
ravi-kumar-pilla Oct 16, 2023
2406458
merge main
ravi-kumar-pilla Oct 16, 2023
e620bcb
Merge branch 'chore/refactor-data-classes' of https://kedr…
ravi-kumar-pilla Oct 16, 2023
5ec1451
update method comments
ravi-kumar-pilla Oct 16, 2023
15855e2
Merge branch 'main' of https://kedro-org/kedro-viz into ch…
ravi-kumar-pilla Oct 16, 2023
5c97bbf
Merge branch 'main' into chore/refactor-data-classes
ravi-kumar-pilla Oct 16, 2023
198e946
Merge branch 'chore/refactor-data-classes' of https://kedr…
ravi-kumar-pilla Oct 17, 2023
3175df5
revert back method shuffles for better PR reviews
ravi-kumar-pilla Oct 17, 2023
38d0029
addressing PR comments
ravi-kumar-pilla Oct 17, 2023
c82e04c
merge main
ravi-kumar-pilla Oct 26, 2023
b106ad8
not-working version of pydantic shift
ravi-kumar-pilla Oct 26, 2023
1a95fd3
non-working p2
ravi-kumar-pilla Oct 31, 2023
844531d
not-working v3
ravi-kumar-pilla Nov 1, 2023
20bf139
modify metadata classes and update pytests
ravi-kumar-pilla Nov 3, 2023
a7564bd
merge main
ravi-kumar-pilla Nov 6, 2023
c526956
fix all pytests
ravi-kumar-pilla Nov 7, 2023
f61deb9
fix pytest for coverage
ravi-kumar-pilla Nov 7, 2023
ff39824
Merge branch 'main' into chore/refactor-data-classes
ravi-kumar-pilla Nov 7, 2023
d7e2653
Merge branch 'main' into chore/refactor-data-classes
ravi-kumar-pilla Nov 7, 2023
74b2930
address PR comments1
ravi-kumar-pilla Nov 7, 2023
b0e34ef
Merge branch 'chore/refactor-data-classes' of https://kedr…
ravi-kumar-pilla Nov 7, 2023
ca6b16f
fix lint issues
ravi-kumar-pilla Nov 7, 2023
2970e9e
add hash for pylint
ravi-kumar-pilla Nov 7, 2023
50c9d08
merge main
ravi-kumar-pilla Nov 7, 2023
80848d5
fix lint errors
ravi-kumar-pilla Nov 7, 2023
5cc645a
fix lint errors
ravi-kumar-pilla Nov 7, 2023
8e53a76
Merge branch 'main' into chore/refactor-data-classes
tynandebold Nov 13, 2023
6ce4e37
create base class for tag and registered pipeline
ravi-kumar-pilla Nov 14, 2023
bb51814
add release note
ravi-kumar-pilla Nov 14, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions RELEASE.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ Please follow the established format:
- Fix dataset factory patterns in Experiment Tracking. (#1588)
- Improved feedback for copy to clipboard feature. (#1614)
- Ensure Kedro-Viz works when hosted on a URL subpath. (#1621)
- Refactor flowchart dataclasses to pydantic base models. (#1565)

# Release 6.6.1

Expand Down
2 changes: 1 addition & 1 deletion package/.pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs=1

# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=pylint.extensions.docparams
load-plugins=pylint.extensions.docparams,pylint_pydantic

# Pickle collected data for later comparisons.
persistent=yes
Expand Down
3 changes: 1 addition & 2 deletions package/kedro_viz/api/graphql/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,9 @@

from strawberry import ID

from kedro_viz.api.graphql.types import Run
from kedro_viz.models.experiment_tracking import RunModel, UserRunDetailsModel

from .types import Run


def format_run(
run_id: str, run_blob: Dict, user_run_details: Optional[UserRunDetailsModel] = None
Expand Down
22 changes: 11 additions & 11 deletions package/kedro_viz/api/rest/responses.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""`kedro_viz.api.rest.responses` defines REST response types."""
# pylint: disable=missing-class-docstring,too-few-public-methods,invalid-name
# pylint: disable=missing-class-docstring,invalid-name
import abc
import logging
from typing import Any, Dict, List, Optional, Union
Expand Down Expand Up @@ -310,18 +310,18 @@ def get_default_response() -> GraphAPIResponse:
)

return GraphAPIResponse(
nodes=data_access_manager.get_nodes_for_registered_pipeline( # type: ignore
nodes=data_access_manager.get_nodes_for_registered_pipeline(
default_selected_pipeline_id
),
edges=data_access_manager.get_edges_for_registered_pipeline( # type: ignore
edges=data_access_manager.get_edges_for_registered_pipeline(
default_selected_pipeline_id
),
tags=data_access_manager.tags.as_list(),
layers=data_access_manager.get_sorted_layers_for_registered_pipeline(
default_selected_pipeline_id
),
pipelines=data_access_manager.registered_pipelines.as_list(),
modular_pipelines=modular_pipelines_tree, # type: ignore
modular_pipelines=modular_pipelines_tree,
selected_pipeline=default_selected_pipeline_id,
)

Expand All @@ -336,15 +336,15 @@ def get_node_metadata_response(node_id: str):
return JSONResponse(content={})

if isinstance(node, TaskNode):
return TaskNodeMetadata(node)
return TaskNodeMetadata(task_node=node)

if isinstance(node, DataNode):
return DataNodeMetadata(node)
return DataNodeMetadata(data_node=node)

if isinstance(node, TranscodedDataNode):
return TranscodedDataNodeMetadata(node)
return TranscodedDataNodeMetadata(transcoded_data_node=node)

return ParametersNodeMetadata(node)
return ParametersNodeMetadata(parameters_node=node)


def get_selected_pipeline_response(registered_pipeline_id: str):
Expand All @@ -361,10 +361,10 @@ def get_selected_pipeline_response(registered_pipeline_id: str):
)

return GraphAPIResponse(
nodes=data_access_manager.get_nodes_for_registered_pipeline( # type: ignore
nodes=data_access_manager.get_nodes_for_registered_pipeline(
registered_pipeline_id
),
edges=data_access_manager.get_edges_for_registered_pipeline( # type: ignore
edges=data_access_manager.get_edges_for_registered_pipeline(
registered_pipeline_id
),
tags=data_access_manager.tags.as_list(),
Expand All @@ -373,7 +373,7 @@ def get_selected_pipeline_response(registered_pipeline_id: str):
),
pipelines=data_access_manager.registered_pipelines.as_list(),
selected_pipeline=registered_pipeline_id,
modular_pipelines=modular_pipelines_tree, # type: ignore
modular_pipelines=modular_pipelines_tree,
)


Expand Down
18 changes: 11 additions & 7 deletions package/kedro_viz/data_access/managers.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def add_pipeline(self, registered_pipeline_id: str, pipeline: KedroPipeline):
task_node = self.add_node(registered_pipeline_id, node)
self.registered_pipelines.add_node(registered_pipeline_id, task_node.id)

current_modular_pipeline = modular_pipelines.extract_from_node(task_node)
current_modular_pipeline_id = modular_pipelines.extract_from_node(task_node)

# Add node's inputs as DataNode to the graph
for input_ in node.inputs:
Expand All @@ -169,8 +169,8 @@ def add_pipeline(self, registered_pipeline_id: str, pipeline: KedroPipeline):
# The method `add_input` will take care of figuring out whether
# it is an internal or external input of the modular pipeline.
modular_pipelines.extract_from_node(input_node)
if current_modular_pipeline is not None:
modular_pipelines.add_input(current_modular_pipeline, input_node)
if current_modular_pipeline_id is not None:
modular_pipelines.add_input(current_modular_pipeline_id, input_node)

# Add node outputs as DataNode to the graph.
# It follows similar logic to adding inputs.
Expand All @@ -186,8 +186,10 @@ def add_pipeline(self, registered_pipeline_id: str, pipeline: KedroPipeline):
output_node.original_version = self.catalog.get_dataset(output)

modular_pipelines.extract_from_node(output_node)
if current_modular_pipeline is not None:
modular_pipelines.add_output(current_modular_pipeline, output_node)
if current_modular_pipeline_id is not None:
modular_pipelines.add_output(
current_modular_pipeline_id, output_node
)

def add_node(self, registered_pipeline_id: str, node: KedroNode) -> TaskNode:
"""Add a Kedro node as a TaskNode to the NodesRepository
Expand Down Expand Up @@ -476,7 +478,9 @@ def create_modular_pipelines_tree_for_registered_pipeline(
bad_inputs = modular_pipeline.inputs.intersection(descendants)
for bad_input in bad_inputs:
digraph.remove_edge(bad_input, modular_pipeline_id)
edges.remove_edge(GraphEdge(bad_input, modular_pipeline_id))
edges.remove_edge(
GraphEdge(source=bad_input, target=modular_pipeline_id)
)
node_dependencies[bad_input].remove(modular_pipeline_id)

for node_id, node in self.nodes.as_dict().items():
Expand All @@ -488,7 +492,7 @@ def create_modular_pipelines_tree_for_registered_pipeline(
if not node.modular_pipelines or node_id in root_parameters:
modular_pipelines_tree[ROOT_MODULAR_PIPELINE_ID].children.add(
ModularPipelineChild(
node_id, self.nodes.get_node_by_id(node_id).type
id=node_id, type=self.nodes.get_node_by_id(node_id).type
)
)

Expand Down
Loading