fix(merge): resolve merge conflicts
mathysgrapotte committed Jan 29, 2025
2 parents 305f3cf + 30fbd3f commit 5e6faa0
Showing 12 changed files with 224 additions and 45 deletions.
2 changes: 1 addition & 1 deletion src/stimulus/cli/split_csv.py
@@ -6,7 +6,7 @@
import yaml

from stimulus.data.data_handlers import DatasetProcessor, SplitManager
-from stimulus.data.experiments import SplitLoader
+from stimulus.data.loaders import SplitLoader
from stimulus.utils.yaml_data import YamlSubConfigDict


2 changes: 1 addition & 1 deletion src/stimulus/cli/transform_csv.py
@@ -6,7 +6,7 @@
import yaml

from stimulus.data.data_handlers import DatasetProcessor, TransformManager
-from stimulus.data.experiments import TransformLoader
+from stimulus.data.loaders import TransformLoader
from stimulus.utils.yaml_data import YamlSubConfigDict


10 changes: 5 additions & 5 deletions src/stimulus/data/data_handlers.py
@@ -29,7 +29,7 @@
import torch
import yaml

-from stimulus.data import experiments
+from stimulus.data import loaders
from stimulus.utils import yaml_data


@@ -157,7 +157,7 @@ class EncodeManager:

def __init__(
self,
-encoder_loader: experiments.EncoderLoader,
+encoder_loader: loaders.EncoderLoader,
) -> None:
"""Initialize the EncodeManager.
@@ -219,7 +219,7 @@ class TransformManager:

def __init__(
self,
-transform_loader: experiments.TransformLoader,
+transform_loader: loaders.TransformLoader,
) -> None:
"""Initialize the TransformManager."""
self.transform_loader = transform_loader
@@ -245,7 +245,7 @@ class SplitManager:

def __init__(
self,
-split_loader: experiments.SplitLoader,
+split_loader: loaders.SplitLoader,
) -> None:
"""Initialize the SplitManager."""
self.split_loader = split_loader
@@ -396,7 +396,7 @@ def __init__(
self,
config_path: str,
csv_path: str,
-encoder_loader: experiments.EncoderLoader,
+encoder_loader: loaders.EncoderLoader,
split: Union[int, None] = None,
) -> None:
"""Initialize the DatasetLoader."""
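The manager classes above now take their loader from stimulus.data.loaders rather than the old experiments module. A minimal sketch of that pattern, assuming the no-argument constructors shown in the test fixtures later in this commit:

from stimulus.data import loaders
from stimulus.data.data_handlers import EncodeManager

# Build a loader, then hand it to the matching manager, as the new signatures expect.
encoder_loader = loaders.EncoderLoader()
manager = EncodeManager(encoder_loader)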
5 changes: 3 additions & 2 deletions src/stimulus/data/handlertorch.py
@@ -4,7 +4,8 @@

from torch.utils.data import Dataset

-from src.stimulus.data import data_handlers, experiments
+from src.stimulus.data import data_handlers
+from stimulus.data import loaders


class TorchDataset(Dataset):
@@ -14,7 +15,7 @@ def __init__(
self,
config_path: str,
csv_path: str,
-encoder_loader: experiments.EncoderLoader,
+encoder_loader: loaders.EncoderLoader,
split: Optional[int] = None,
) -> None:
"""Initialize the TorchDataset.
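TorchDataset keeps the same constructor; only the encoder_loader annotation moves to stimulus.data.loaders. A hedged usage sketch with placeholder paths, and the loader left unconfigured for brevity (a real setup would first populate it, e.g. via initialize_column_encoders_from_config as in the test fixtures below):

from stimulus.data.handlertorch import TorchDataset
from stimulus.data.loaders import EncoderLoader

encoder_loader = EncoderLoader()  # assumed to be configured with column encoders before use
dataset = TorchDataset(
    config_path="config.yaml",  # placeholder path
    csv_path="data.csv",        # placeholder path
    encoder_loader=encoder_loader,
    split=0,
)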
File renamed without changes.
83 changes: 83 additions & 0 deletions src/stimulus/typing/__init__.py
@@ -0,0 +1,83 @@
"""Typing for Stimulus Python API.
This module contains all Stimulus types which will be used for variable typing
and likely not instantiated, as well as aliases for other types to use for typing purposes.
The aliases from this module should be used for typing purposes only.
"""
# ruff: noqa: F401

from typing import TypeAlias

# these imports mostly alias everything
from stimulus.analysis.analysis_default import Analysis, AnalysisPerformanceTune, AnalysisRobustness
from stimulus.data.data_handlers import (
DatasetHandler,
DatasetLoader,
DatasetManager,
DatasetProcessor,
EncodeManager,
SplitManager,
TransformManager,
)
from stimulus.data.encoding.encoders import AbstractEncoder as Encoder
from stimulus.data.handlertorch import TorchDataset
from stimulus.data.loaders import EncoderLoader, SplitLoader, TransformLoader
from stimulus.data.splitters.splitters import AbstractSplitter as Splitter
from stimulus.data.transform.data_transformation_generators import AbstractDataTransformer as Transform
from stimulus.learner.predict import PredictWrapper
from stimulus.learner.raytune_learner import CheckpointDict, TuneModel, TuneWrapper
from stimulus.learner.raytune_parser import RayTuneMetrics, RayTuneOptimizer, RayTuneResult, TuneParser
from stimulus.utils.performance import Performance
from stimulus.utils.yaml_data import (
YamlColumns,
YamlColumnsEncoder,
YamlConfigDict,
YamlGlobalParams,
YamlSchema,
YamlSplit,
YamlSubConfigDict,
YamlTransform,
YamlTransformColumns,
YamlTransformColumnsTransformation,
)
from stimulus.utils.yaml_model_schema import (
CustomTunableParameter,
Data,
Loss,
Model,
RayTuneModel,
RunParams,
Scheduler,
TunableParameter,
Tune,
TuneParams,
YamlRayConfigLoader,
)

# data/data_handlers.py

DataManager: TypeAlias = DatasetManager | EncodeManager | SplitManager | TransformManager

# data/loaders.py

Loader: TypeAlias = DatasetLoader | EncoderLoader | TransformLoader | SplitLoader

# learner/raytune_parser.py

RayTuneData: TypeAlias = RayTuneMetrics | RayTuneOptimizer | RayTuneResult

# utils/yaml_data.py

YamlData: TypeAlias = (
YamlColumns
| YamlColumnsEncoder
| YamlConfigDict
| YamlGlobalParams
| YamlSchema
| YamlSplit
| YamlSubConfigDict
| YamlTransform
| YamlTransformColumns
| YamlTransformColumnsTransformation
)
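The aliases above are intended purely for annotations. A small sketch of how they might be used; the helper function is hypothetical and not part of this commit:

from stimulus.typing import DataManager, Loader

def describe(manager: DataManager, loader: Loader) -> str:
    # Hypothetical helper: annotate with the union aliases instead of concrete classes.
    return f"{type(manager).__name__} backed by {type(loader).__name__}"

Note that the X | Y unions used for these aliases require Python 3.10 or later.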
2 changes: 1 addition & 1 deletion src/stimulus/utils/launch_utils.py
@@ -5,7 +5,7 @@
import os
from typing import Union

-import stimulus.data.experiments as exp
+import stimulus.data.loaders as exp


def import_class_from_file(file_path: str) -> type:
40 changes: 20 additions & 20 deletions tests/data/test_data_handlers.py
@@ -3,7 +3,7 @@
import pytest
import yaml

-from stimulus.data import experiments
+from stimulus.data import loaders
from stimulus.data.data_handlers import (
DatasetLoader,
DatasetManager,
@@ -82,7 +82,7 @@ def dump_single_split_config_to_disk() -> str:

## Loader fixtures
@pytest.fixture
-def encoder_loader(generate_sub_configs: list[YamlConfigDict]) -> experiments.EncoderLoader:
+def encoder_loader(generate_sub_configs: list[YamlConfigDict]) -> loaders.EncoderLoader:
"""Create encoder loader with initialized encoders.
Args:
@@ -91,13 +91,13 @@ def encoder_loader(generate_sub_configs: list[YamlConfigDict]) -> experiments.En
Returns:
experiments.EncoderLoader: Initialized encoder loader
"""
-loader = experiments.EncoderLoader()
+loader = loaders.EncoderLoader()
loader.initialize_column_encoders_from_config(generate_sub_configs[0].columns)
return loader


@pytest.fixture
-def transform_loader(generate_sub_configs: list[YamlConfigDict]) -> experiments.TransformLoader:
+def transform_loader(generate_sub_configs: list[YamlConfigDict]) -> loaders.TransformLoader:
"""Create transform loader with initialized transformers.
Args:
@@ -106,13 +106,13 @@ def transform_loader(generate_sub_configs: list[YamlConfigDict]) -> experiments.
Returns:
experiments.TransformLoader: Initialized transform loader
"""
-loader = experiments.TransformLoader()
+loader = loaders.TransformLoader()
loader.initialize_column_data_transformers_from_config(generate_sub_configs[0].transforms)
return loader


@pytest.fixture
-def split_loader(generate_sub_configs: list[YamlConfigDict]) -> experiments.SplitLoader:
+def split_loader(generate_sub_configs: list[YamlConfigDict]) -> loaders.SplitLoader:
"""Create split loader with initialized splitter.
Args:
@@ -121,7 +121,7 @@ def split_loader(generate_sub_configs: list[YamlConfigDict]) -> experiments.Spli
Returns:
experiments.SplitLoader: Initialized split loader
"""
-loader = experiments.SplitLoader()
+loader = loaders.SplitLoader()
loader.initialize_splitter_from_config(generate_sub_configs[0].split)
return loader

@@ -166,21 +166,21 @@ def test_dataset_manager_get_transform_logic(dump_single_split_config_to_disk: s
# Test EncodeManager
def test_encode_manager_init() -> None:
"""Test initialization of EncodeManager."""
-encoder_loader = experiments.EncoderLoader()
+encoder_loader = loaders.EncoderLoader()
manager = EncodeManager(encoder_loader)
assert hasattr(manager, "encoder_loader")


def test_encode_manager_initialize_encoders() -> None:
"""Test encoder initialization."""
-encoder_loader = experiments.EncoderLoader()
+encoder_loader = loaders.EncoderLoader()
manager = EncodeManager(encoder_loader)
assert hasattr(manager, "encoder_loader")


def test_encode_manager_encode_numeric() -> None:
"""Test numeric encoding."""
-encoder_loader = experiments.EncoderLoader()
+encoder_loader = loaders.EncoderLoader()
intencoder = encoder_loader.get_encoder("NumericEncoder")
encoder_loader.set_encoder_as_attribute("test_col", intencoder)
manager = EncodeManager(encoder_loader)
@@ -192,21 +192,21 @@ def test_encode_manager_encode_numeric() -> None:
# Test TransformManager
def test_transform_manager_init() -> None:
"""Test initialization of TransformManager."""
-transform_loader = experiments.TransformLoader()
+transform_loader = loaders.TransformLoader()
manager = TransformManager(transform_loader)
assert hasattr(manager, "transform_loader")


def test_transform_manager_initialize_transforms() -> None:
"""Test transform initialization."""
-transform_loader = experiments.TransformLoader()
+transform_loader = loaders.TransformLoader()
manager = TransformManager(transform_loader)
assert hasattr(manager, "transform_loader")


def test_transform_manager_transform_column() -> None:
"""Test column transformation."""
-transform_loader = experiments.TransformLoader()
+transform_loader = loaders.TransformLoader()
dummy_config = YamlTransform(
transformation_name="GaussianNoise",
columns=[
@@ -230,19 +230,19 @@ def test_transform_manager_transform_column() -> None:


# Test SplitManager
-def test_split_manager_init(split_loader: experiments.SplitLoader) -> None:
+def test_split_manager_init(split_loader: loaders.SplitLoader) -> None:
"""Test initialization of SplitManager."""
manager = SplitManager(split_loader)
assert hasattr(manager, "split_loader")


-def test_split_manager_initialize_splits(split_loader: experiments.SplitLoader) -> None:
+def test_split_manager_initialize_splits(split_loader: loaders.SplitLoader) -> None:
"""Test split initialization."""
manager = SplitManager(split_loader)
assert hasattr(manager, "split_loader")


-def test_split_manager_apply_split(split_loader: experiments.SplitLoader) -> None:
+def test_split_manager_apply_split(split_loader: loaders.SplitLoader) -> None:
"""Test applying splits to data."""
manager = SplitManager(split_loader)
data = {"col": range(100)}
@@ -271,7 +271,7 @@ def test_dataset_processor_init(
def test_dataset_processor_apply_split(
dump_single_split_config_to_disk: str,
titanic_csv_path: str,
-split_loader: experiments.SplitLoader,
+split_loader: loaders.SplitLoader,
) -> None:
"""Test applying splits in DatasetProcessor."""
processor = DatasetProcessor(
@@ -288,7 +288,7 @@ def test_dataset_processor_apply_split(
def test_dataset_processor_apply_transformation_group(
dump_single_split_config_to_disk: str,
titanic_csv_path: str,
-transform_loader: experiments.TransformLoader,
+transform_loader: loaders.TransformLoader,
) -> None:
"""Test applying transformation groups."""
processor = DatasetProcessor(
@@ -318,7 +318,7 @@ def test_dataset_processor_apply_transformation_group(
def test_dataset_loader_init(
dump_single_split_config_to_disk: str,
titanic_csv_path: str,
-encoder_loader: experiments.EncoderLoader,
+encoder_loader: loaders.EncoderLoader,
) -> None:
"""Test initialization of DatasetLoader."""
loader = DatasetLoader(
@@ -336,7 +336,7 @@ def test_dataset_loader_init(
def test_dataset_loader_get_dataset(
dump_single_split_config_to_disk: str,
titanic_csv_path: str,
-encoder_loader: experiments.EncoderLoader,
+encoder_loader: loaders.EncoderLoader,
) -> None:
"""Test getting dataset from loader."""
loader = DatasetLoader(