Adds checks for applicable types to YAMLData* #821

Merged: 3 commits, Apr 15, 2024
11 changes: 6 additions & 5 deletions hamilton/plugins/yaml_extensions.py
@@ -11,7 +11,8 @@
 from hamilton.io.data_adapters import DataLoader, DataSaver
 from hamilton.io.utils import get_file_metadata

-PrimitiveType = Union[str, int, bool, dict, list]
+PrimitiveTypes = str, int, float, bool, dict, list
+AcceptedTypes = Union[PrimitiveTypes]


 @dataclasses.dataclass
@@ -20,13 +21,13 @@ class YAMLDataLoader(DataLoader):

     @classmethod
     def applicable_types(cls) -> Collection[Type]:
-        return [str, int, bool, dict, list]
+        return [*PrimitiveTypes]

     @classmethod
     def name(cls) -> str:
         return "yaml"

-    def load_data(self, type_: Type) -> Tuple[PrimitiveType, Dict[str, Any]]:
+    def load_data(self, type_: Type) -> Tuple[AcceptedTypes, Dict[str, Any]]:
         path = self.path
         if isinstance(self.path, str):
             path = pathlib.Path(self.path)
@@ -41,13 +42,13 @@ class YAMLDataSaver(DataSaver):

     @classmethod
     def applicable_types(cls) -> Collection[Type]:
-        return [str, int, bool, dict, list]
+        return [*PrimitiveTypes]

     @classmethod
     def name(cls) -> str:
         return "yaml"

-    def save_data(self, data: Any) -> Dict[str, Any]:
+    def save_data(self, data: AcceptedTypes) -> Dict[str, Any]:
         path = self.path
         if isinstance(path, str):
             path = pathlib.Path(path)
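Note that the shared tuple now also includes float, which the previous hard-coded lists omitted. Because subscripting typing.Union with a tuple expands its members, AcceptedTypes is equivalent to spelling the union out, so the annotations and applicable_types() stay in sync with the single PrimitiveTypes definition. A minimal standalone sketch of that equivalence (illustrative only, not part of the diff):

    from typing import Union

    PrimitiveTypes = str, int, float, bool, dict, list
    AcceptedTypes = Union[PrimitiveTypes]

    # Union subscripted with a tuple behaves as if each member were passed directly.
    assert AcceptedTypes == Union[str, int, float, bool, dict, list]
    # applicable_types() spreads the same tuple into a list, so both stay aligned.
    assert [*PrimitiveTypes] == [str, int, float, bool, dict, list]
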
1 change: 1 addition & 0 deletions requirements-docs.txt
@@ -20,6 +20,7 @@ pillow
 polars
 pyarrow >= 1.0.0
 pyspark
+PyYAML
 ray
 readthedocs-sphinx-ext<2.3 # read the docs pins
 recommonmark==0.5.0 # read the docs pins
1 change: 1 addition & 0 deletions requirements-test.txt
@@ -18,6 +18,7 @@ pyarrow
 pyreadstat # for SPSS data loader
 pytest
 pytest-cov
+PyYAML
 scikit-learn
 sqlalchemy==1.4.49; python_version == '3.7.*'
 sqlalchemy; python_version >= '3.8'
25 changes: 24 additions & 1 deletion tests/plugins/test_yaml_extension.py
@@ -1,9 +1,12 @@
 import pathlib
+from typing import List, Type

 import pytest
 import yaml

-from hamilton.plugins.yaml_extensions import YAMLDataLoader, YAMLDataSaver
+from hamilton.function_modifiers.adapters import resolve_adapter_class
+from hamilton.io.data_adapters import DataLoader, DataSaver
+from hamilton.plugins.yaml_extensions import PrimitiveTypes, YAMLDataLoader, YAMLDataSaver

 TEST_DATA_FOR_YAML = [
     (1, "int.yaml"),
@@ -46,3 +49,23 @@ def test_yaml_loader_and_saver(tmp_path: pathlib.Path, data, file_name):
     loader = YAMLDataLoader(path)
     loaded_data = loader.load_data(type(data))
     assert data == loaded_data[0]
+
+
+@pytest.mark.parametrize(
+    "type_,classes,correct_class",
+    [(t, [YAMLDataLoader], YAMLDataLoader) for t in PrimitiveTypes],
+)
+def test_resolve_correct_loader_class(
+    type_: Type[Type], classes: List[Type[DataLoader]], correct_class: Type[DataLoader]
+):
+    assert resolve_adapter_class(type_, classes) == correct_class
+
+
+@pytest.mark.parametrize(
+    "type_,classes,correct_class",
+    [(t, [YAMLDataSaver], YAMLDataSaver) for t in PrimitiveTypes],
+)
+def test_resolve_correct_saver_class(
+    type_: Type[Type], classes: List[Type[DataSaver]], correct_class: Type[DataLoader]
+):
+    assert resolve_adapter_class(type_, classes) == correct_class
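
For reference, a condensed version of what the new parametrized tests assert, written as a plain loop; this assumes resolve_adapter_class(type_, classes) selects an adapter whose applicable_types() covers the requested type, as the tests above imply:

    from hamilton.function_modifiers.adapters import resolve_adapter_class
    from hamilton.plugins.yaml_extensions import PrimitiveTypes, YAMLDataLoader, YAMLDataSaver

    # Every primitive type handled by the YAML adapters should resolve to them.
    for type_ in PrimitiveTypes:  # str, int, float, bool, dict, list
        assert resolve_adapter_class(type_, [YAMLDataLoader]) == YAMLDataLoader
        assert resolve_adapter_class(type_, [YAMLDataSaver]) == YAMLDataSaver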