From 8230e30c9a6f567de5a16e58a3d85af302abb39f Mon Sep 17 00:00:00 2001 From: luiz Date: Tue, 7 May 2024 16:38:01 +0200 Subject: [PATCH 01/15] m --- dependencies/Dockerfile_1 | 13 ---- dependencies/requirements_0.txt | 4 +- pieces/ExampleComplexPiece/metadata.json | 24 -------- pieces/ExampleComplexPiece/models.py | 23 ------- pieces/ExampleComplexPiece/piece.py | 35 ----------- pieces/ExampleSimplePiece/models.py | 36 ----------- pieces/ExampleSimplePiece/piece.py | 51 ---------------- .../metadata.json | 0 pieces/ProphetTrainModelPiece/models.py | 60 +++++++++++++++++++ pieces/ProphetTrainModelPiece/piece.py | 29 +++++++++ .../test_example_simple_piece.py | 0 11 files changed, 92 insertions(+), 183 deletions(-) delete mode 100644 dependencies/Dockerfile_1 delete mode 100644 pieces/ExampleComplexPiece/metadata.json delete mode 100644 pieces/ExampleComplexPiece/models.py delete mode 100644 pieces/ExampleComplexPiece/piece.py delete mode 100644 pieces/ExampleSimplePiece/models.py delete mode 100644 pieces/ExampleSimplePiece/piece.py rename pieces/{ExampleSimplePiece => ProphetTrainModelPiece}/metadata.json (100%) create mode 100644 pieces/ProphetTrainModelPiece/models.py create mode 100644 pieces/ProphetTrainModelPiece/piece.py rename pieces/{ExampleSimplePiece => ProphetTrainModelPiece}/test_example_simple_piece.py (100%) diff --git a/dependencies/Dockerfile_1 b/dependencies/Dockerfile_1 deleted file mode 100644 index 0ffb4e8..0000000 --- a/dependencies/Dockerfile_1 +++ /dev/null @@ -1,13 +0,0 @@ -FROM taufferconsulting/domino-base-piece:latest - -# Install specific requirements to run OpenCV -RUN apt-get update -RUN apt-get install ffmpeg libsm6 libxext6 -y - -# Need to copy pieces source code -COPY config.toml domino/pieces_repository/ -COPY pieces domino/pieces_repository/pieces -COPY .domino domino/pieces_repository/.domino - -# Install specific dependencies in domino_env virtual environment -RUN pip install --no-cache-dir opencv-python scipy diff --git a/dependencies/requirements_0.txt b/dependencies/requirements_0.txt index 1faa2fd..feff9ab 100644 --- a/dependencies/requirements_0.txt +++ b/dependencies/requirements_0.txt @@ -1 +1,3 @@ -numpy==1.23.5 \ No newline at end of file +prophet==1.1.5 +pandas==2.1.3 +plotly==5.18.0 \ No newline at end of file diff --git a/pieces/ExampleComplexPiece/metadata.json b/pieces/ExampleComplexPiece/metadata.json deleted file mode 100644 index f50d336..0000000 --- a/pieces/ExampleComplexPiece/metadata.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "ExampleComplexPiece", - "description": "This is an example of a complex Domino Piece", - "dependency": { - "dockerfile": "Dockerfile_1" - }, - "container_resources": { - "requests": { - "cpu": "100m", - "memory": "128Mi" - }, - "limits": { - "cpu": "500m", - "memory": "512Mi" - } - }, - "tags": [ - "Example" - ], - "style": { - "node_label": "Complex Piece", - "icon_class_name": "fa-solid:database" - } -} \ No newline at end of file diff --git a/pieces/ExampleComplexPiece/models.py b/pieces/ExampleComplexPiece/models.py deleted file mode 100644 index a59fdc2..0000000 --- a/pieces/ExampleComplexPiece/models.py +++ /dev/null @@ -1,23 +0,0 @@ -from pydantic import BaseModel, Field -from enum import Enum - - -class InputModel(BaseModel): - arg1: str = Field( - description="Distribution mean" - ) - - -class OutputModel(BaseModel): - message: str = Field( - description="Output message to log" - ) - result: str = Field( - description="The result of this Piece's processing" - ) - - -class SecretsModel(BaseModel): 
- EXAMPLE_OPERATOR_SECRET_2: str = Field( - description="A secret necessary to run this Piece" - ) \ No newline at end of file diff --git a/pieces/ExampleComplexPiece/piece.py b/pieces/ExampleComplexPiece/piece.py deleted file mode 100644 index 2c88f87..0000000 --- a/pieces/ExampleComplexPiece/piece.py +++ /dev/null @@ -1,35 +0,0 @@ -from domino.base_piece import BasePiece -from .models import InputModel, OutputModel, SecretsModel -import os - - -class ExampleComplexPiece(BasePiece): - """ - This Piece serves as a more complex example, using Dockerfile as dependency, from where you can start writing your own Piece. - Remember to also change all other required files accordingly: - - piece.py (this file) - - models.py - - metadata.json - - requirements.txt or Dockerfile if needed - """ - - def piece_function(self, input_data: InputModel, secrets_data: SecretsModel): - - # Input arguments are retrieved from the Input model object - arg1 = input_data.arg1 - - # If this Piece needs to use a Secret value, it can retrieve it from Secrets Model object using secrets_data argument - piece_secret = secrets_data.EXAMPLE_OPERATOR_SECRET_2 - - # Basic logging is already implemented in the BasePiece class - self.logger.info("Starting piece process...") - - # Here we add the Piece function logic - message = "" - result = "" - - # Finally, results should return as an Output model - return OutputModel( - message=message, - result=result - ) \ No newline at end of file diff --git a/pieces/ExampleSimplePiece/models.py b/pieces/ExampleSimplePiece/models.py deleted file mode 100644 index 29e5743..0000000 --- a/pieces/ExampleSimplePiece/models.py +++ /dev/null @@ -1,36 +0,0 @@ -from pydantic import BaseModel, Field -from enum import Enum - - -class DistributionType(str, Enum): - gaussian = "gaussian" - poisson = "poisson" - - -class InputModel(BaseModel): - distribution_name: DistributionType = Field( - description="Name of the distribution to sample from" - ) - distribution_mean: float = Field( - description="Distribution mean" - ) - distribution_sd: float = Field( - default=1., - gt=0., - description="Distribution standard deviation" - ) - - -class OutputModel(BaseModel): - message: str = Field( - description="Output message to log" - ) - sample_result: str = Field( - description="The result of this Piece's processing" - ) - - -class SecretsModel(BaseModel): - EXAMPLE_OPERATOR_SECRET_1: str = Field( - description="A secret necessary to run this Piece" - ) \ No newline at end of file diff --git a/pieces/ExampleSimplePiece/piece.py b/pieces/ExampleSimplePiece/piece.py deleted file mode 100644 index 2257760..0000000 --- a/pieces/ExampleSimplePiece/piece.py +++ /dev/null @@ -1,51 +0,0 @@ -from domino.base_piece import BasePiece -from .models import InputModel, OutputModel, SecretsModel -import os - -import numpy as np - - -class ExampleSimplePiece(BasePiece): - """ - This Piece serves as a simple example, from where you can start writing your own Piece. 
- Remember to also change all other required files accordingly: - - piece.py (this file) - - models.py - - metadata.json - - requirements.txt or Dockerfile if needed - """ - - def piece_function(self, input_data: InputModel, secrets_data: SecretsModel): - - # Input arguments are retrieved from the Input model object - distribution_name = input_data.distribution_name - distribution_mean = input_data.distribution_mean - distribution_sd = input_data.distribution_sd - - # If this Piece needs to use a Secret value, it can retrieve it from Secrets Model object using secrets_data argument - piece_secret = secrets_data.EXAMPLE_OPERATOR_SECRET_1 - - # Basic logging is already implemented in the BasePiece class - self.logger.info("Starting sampling process...") - - # Here we add the Piece function logic - message = "" - if distribution_name == "gaussian": - sample_result = np.random.normal(distribution_mean, distribution_sd) - - elif distribution_name == "poisson": - if distribution_mean < 0: - distribution_mean = abs(distribution_mean) - message += "\n" - message += "Poisson distributions only accept positive mean values. Applying abs() to the value received." - sample_result = np.random.poisson(distribution_mean) - - self.logger.info(f"Sampled from a gaussian distribution with mean={distribution_mean} and sd={distribution_sd}") - message += "\n" - message += "Sampling operation was successful!" - - # Finally, results should return as an Output model - return OutputModel( - message=message, - sample_result=sample_result - ) \ No newline at end of file diff --git a/pieces/ExampleSimplePiece/metadata.json b/pieces/ProphetTrainModelPiece/metadata.json similarity index 100% rename from pieces/ExampleSimplePiece/metadata.json rename to pieces/ProphetTrainModelPiece/metadata.json diff --git a/pieces/ProphetTrainModelPiece/models.py b/pieces/ProphetTrainModelPiece/models.py new file mode 100644 index 0000000..9356641 --- /dev/null +++ b/pieces/ProphetTrainModelPiece/models.py @@ -0,0 +1,60 @@ +from pydantic import BaseModel, Field +from enum import Enum +from typing import List +from datetime import date + + +class GrowthTrend(str, Enum): + linear = "linear" + logistic = "logistic" + flat = "flat" + + +class SeasonalityMode(str, Enum): + additive = "additive" + multiplicative = "multiplicative" + + +class InputModel(BaseModel): + input_data_file: str = Field( + description="Path to the input data file. Accepted formats: `.csv`, `.json`." + ) + datetime_column_name: str = Field( + description="Name of the column containing the datetime values." + ) + target_column_name: str = Field( + description="Name of the column containing the target values." + ) + test_set_percentage: float = Field( + default=20.0, + ge=1, + le=90, + description="Percentage of the data to use as test set. Default is 20%." + ) + growth_trend: GrowthTrend = Field( + default=GrowthTrend.linear, + description="The growth trend of the data. Options are `linear`, `logistic` and `flat`. Default is `linear`." + ) + changepoints: List[date] = Field( + default=[], + description="List of dates at which to include potential changepoints. If not specified, potential changepoints are selected automatically." + ) + n_changepoints: int = Field( + default=25, + ge=0, + le=1000, + description=" Number of potential changepoints to include. Not used if input `changepoints` is supplied." + ) + seasonality_mode: SeasonalityMode = Field( + default=SeasonalityMode.additive, + description="The seasonality mode of the data. 
Options are `additive` and `multiplicative`. Default is `additive`." + ) + + +class OutputModel(BaseModel): + model_file_path: str = Field( + description="Path to the file containing the trained model." + ) + results_figure_file_path: str = Field( + description="Path to the file containing the results figure." + ) diff --git a/pieces/ProphetTrainModelPiece/piece.py b/pieces/ProphetTrainModelPiece/piece.py new file mode 100644 index 0000000..84f59c1 --- /dev/null +++ b/pieces/ProphetTrainModelPiece/piece.py @@ -0,0 +1,29 @@ +from domino.base_piece import BasePiece +from .models import InputModel, OutputModel +import pandas as pd +from prophet import Prophet +from prophet.serialize import model_to_json +import plotly.graph_objs as go + + +class ProphetTrainModelPiece(BasePiece): + """ + This Piece trains a Prophet model using the data provided in the input file. + """ + def piece_function(self, input_data: InputModel): + + # Load data + input_data_file = input_data.input_data_file + if input_data_file.endswith('.csv'): + df = pd.read_csv(input_data_file) + elif input_data_file.endswith('.json'): + df = pd.read_json(input_data_file) + else: + raise ValueError("File format not supported. Please pass a CSV or JSON file.") + + + + return OutputModel( + model_file_path=, + results_figure_file_path= + ) \ No newline at end of file diff --git a/pieces/ExampleSimplePiece/test_example_simple_piece.py b/pieces/ProphetTrainModelPiece/test_example_simple_piece.py similarity index 100% rename from pieces/ExampleSimplePiece/test_example_simple_piece.py rename to pieces/ProphetTrainModelPiece/test_example_simple_piece.py From 213c0e8853d903a0e331c56012e383a27ae2fdde Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 13:47:24 -0300 Subject: [PATCH 02/15] pieces --- dependencies/requirements_0.txt | 3 +- pieces/GetYahooFinanceDataPiece/metadata.json | 24 +++++++++++ pieces/GetYahooFinanceDataPiece/models.py | 20 ++++++++++ pieces/GetYahooFinanceDataPiece/piece.py | 24 +++++++++++ pieces/ProphetPredictPiece/metadata.json | 24 +++++++++++ pieces/ProphetPredictPiece/models.py | 26 ++++++++++++ pieces/ProphetPredictPiece/piece.py | 40 +++++++++++++++++++ .../test_example_simple_piece.py | 15 +++++++ pieces/ProphetTrainModelPiece/metadata.json | 8 ++-- pieces/ProphetTrainModelPiece/models.py | 23 ++++++----- pieces/ProphetTrainModelPiece/piece.py | 14 ++++--- 11 files changed, 200 insertions(+), 21 deletions(-) create mode 100644 pieces/GetYahooFinanceDataPiece/metadata.json create mode 100644 pieces/GetYahooFinanceDataPiece/models.py create mode 100644 pieces/GetYahooFinanceDataPiece/piece.py create mode 100644 pieces/ProphetPredictPiece/metadata.json create mode 100644 pieces/ProphetPredictPiece/models.py create mode 100644 pieces/ProphetPredictPiece/piece.py create mode 100644 pieces/ProphetPredictPiece/test_example_simple_piece.py diff --git a/dependencies/requirements_0.txt b/dependencies/requirements_0.txt index feff9ab..03a4e09 100644 --- a/dependencies/requirements_0.txt +++ b/dependencies/requirements_0.txt @@ -1,3 +1,4 @@ prophet==1.1.5 pandas==2.1.3 -plotly==5.18.0 \ No newline at end of file +plotly==5.18.0 +yfinance==0.2.38 \ No newline at end of file diff --git a/pieces/GetYahooFinanceDataPiece/metadata.json b/pieces/GetYahooFinanceDataPiece/metadata.json new file mode 100644 index 0000000..0efc7b6 --- /dev/null +++ b/pieces/GetYahooFinanceDataPiece/metadata.json @@ -0,0 +1,24 @@ +{ + "name": "GetYahooFinanceDataPiece", + "description": "This piece gets data from Yahoo Finance.", + 
"dependency": { + "requirements_file": "requirements_0.txt" + }, + "container_resources": { + "requests": { + "cpu": "100m", + "memory": "128Mi" + }, + "limits": { + "cpu": "500m", + "memory": "512Mi" + } + }, + "tags": [ + "Example" + ], + "style": { + "node_label": "Get Yahoo Finance Data", + "icon_class_name": "fa-solid:database" + } +} \ No newline at end of file diff --git a/pieces/GetYahooFinanceDataPiece/models.py b/pieces/GetYahooFinanceDataPiece/models.py new file mode 100644 index 0000000..9e0b905 --- /dev/null +++ b/pieces/GetYahooFinanceDataPiece/models.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel, Field +from datetime import date + + + +class InputModel(BaseModel): + ticker: str = Field( + description="Ticker of the stock to get data from." + ) # TODO change to ENUM ? + start_date: date = Field( + description="Start date of the data to get." + ) + end_date: date = Field( + description="End date of the data to get." + ) + +class OutputModel(BaseModel): + data_path: str = Field( + description="Path to the file containing the trained model." + ) diff --git a/pieces/GetYahooFinanceDataPiece/piece.py b/pieces/GetYahooFinanceDataPiece/piece.py new file mode 100644 index 0000000..862b54d --- /dev/null +++ b/pieces/GetYahooFinanceDataPiece/piece.py @@ -0,0 +1,24 @@ +from domino.base_piece import BasePiece +from .models import InputModel, OutputModel +import yfinance as yf +from pathlib import Path + + +class GetYahooFinanceDataPiece(BasePiece): + """ + This Piece trains a Prophet model using the data provided in the input file. + """ + def piece_function(self, input_data: InputModel): + ticker = input_data.ticker + start_date = input_data.start_date + end_date = input_data.end_date + + df = yf.download(ticker, start=start_date, end=end_date) + df.reset_index(inplace=True) + + df_path = Path(self.results_path) / f"{ticker}_data.csv" + df.to_csv(df_path, index=False) + + return OutputModel( + data_path=str(df_path) + ) \ No newline at end of file diff --git a/pieces/ProphetPredictPiece/metadata.json b/pieces/ProphetPredictPiece/metadata.json new file mode 100644 index 0000000..0a9b922 --- /dev/null +++ b/pieces/ProphetPredictPiece/metadata.json @@ -0,0 +1,24 @@ +{ + "name": "ProphetPredictPiece", + "description": "Piece to predict using Prophet", + "dependency": { + "requirements_file": "requirements_0.txt" + }, + "container_resources": { + "requests": { + "cpu": "100m", + "memory": "128Mi" + }, + "limits": { + "cpu": "500m", + "memory": "512Mi" + } + }, + "tags": [ + "Prophet" + ], + "style": { + "node_label": "Prophet Predict", + "icon_class_name": "fa-solid:database" + } +} \ No newline at end of file diff --git a/pieces/ProphetPredictPiece/models.py b/pieces/ProphetPredictPiece/models.py new file mode 100644 index 0000000..fb150b5 --- /dev/null +++ b/pieces/ProphetPredictPiece/models.py @@ -0,0 +1,26 @@ +from pydantic import BaseModel, Field +from enum import Enum +from typing import List +from datetime import date + + +class InputModel(BaseModel): + model_path: str = Field( + title="Model Path", + description="Path to the file containing the trained model." + ) + periods: int = Field( + title="Periods", + description="Number of periods to forecast." + ) + + +class OutputModel(BaseModel): + forecast_data_path: str = Field( + title="Forecast Data Path", + description="Path to the file containing the forecast data." + ) + forecast_figure_path: str = Field( + title="Forecast Figure Path", + description="Path to the file containing the results figure." 
+ ) diff --git a/pieces/ProphetPredictPiece/piece.py b/pieces/ProphetPredictPiece/piece.py new file mode 100644 index 0000000..f5d50e5 --- /dev/null +++ b/pieces/ProphetPredictPiece/piece.py @@ -0,0 +1,40 @@ +from domino.base_piece import BasePiece +from .models import InputModel, OutputModel +import pandas as pd +from prophet import Prophet +import pickle +from pathlib import Path +from prophet.plot import plot_plotly, plot_components_plotly + + + +class ProphetPredictPiece(BasePiece): + """ + This Piece uses a trained Prophet model to make predictions on new data. + """ + def piece_function(self, input_data: InputModel): + + with open(input_data.model_path, "rb") as f: + model = pickle.load(f) + + future = model.make_future_dataframe(periods=input_data.periods) + forecast = model.predict(future) + + self.results_path = Path(self.results_path) + + forecast_data_path = self.results_path / "forecast_data.csv" + forecast.to_csv(forecast_data_path, index=False) + + forecast_figure_path = self.results_path / "forecast_figure.json" + forecast_figure = plot_plotly(model, forecast) + + forecast_figure.write_json(str(forecast_figure_path)) + self.display_result = { + "file_type": "plotly_json", + "file_path": str(forecast_figure_path) + } + + return OutputModel( + forecast_data_path=str(forecast_data_path), + forecast_figure_path=str(forecast_figure_path), + ) \ No newline at end of file diff --git a/pieces/ProphetPredictPiece/test_example_simple_piece.py b/pieces/ProphetPredictPiece/test_example_simple_piece.py new file mode 100644 index 0000000..168a611 --- /dev/null +++ b/pieces/ProphetPredictPiece/test_example_simple_piece.py @@ -0,0 +1,15 @@ +from domino.testing import piece_dry_run + +def test_example_simple_piece(): + input_data = dict( + distribution_name="gaussian", + distribution_mean=0., + distribution_sd=1. + ) + output_data = piece_dry_run( + "ExampleSimplePiece", + input_data + ) + + assert output_data["message"] is not None + assert output_data["sample_result"] is not None \ No newline at end of file diff --git a/pieces/ProphetTrainModelPiece/metadata.json b/pieces/ProphetTrainModelPiece/metadata.json index 1bad3ce..f347465 100644 --- a/pieces/ProphetTrainModelPiece/metadata.json +++ b/pieces/ProphetTrainModelPiece/metadata.json @@ -1,6 +1,6 @@ { - "name": "ExampleSimplePiece", - "description": "This is an example of a simple Domino Piece", + "name": "ProphetTrainModelPiece", + "description": "Piece to train a prophet model", "dependency": { "requirements_file": "requirements_0.txt" }, @@ -15,10 +15,10 @@ } }, "tags": [ - "Example" + "Prophet" ], "style": { - "node_label": "Simple Piece", + "node_label": "Prophet Train", "icon_class_name": "fa-solid:database" } } \ No newline at end of file diff --git a/pieces/ProphetTrainModelPiece/models.py b/pieces/ProphetTrainModelPiece/models.py index 9356641..b1abb87 100644 --- a/pieces/ProphetTrainModelPiece/models.py +++ b/pieces/ProphetTrainModelPiece/models.py @@ -17,14 +17,15 @@ class SeasonalityMode(str, Enum): class InputModel(BaseModel): input_data_file: str = Field( - description="Path to the input data file. Accepted formats: `.csv`, `.json`." - ) - datetime_column_name: str = Field( - description="Name of the column containing the datetime values." - ) - target_column_name: str = Field( - description="Name of the column containing the target values." - ) + title="Input Data File", + description="Path to the input data file. Accepted formats: `.csv`, `.json`. 
Should use the following format: `ds` (datetime), `y` (target).", + ) + # datetime_column_name: str = Field( + # description="Name of the column containing the datetime values." + # ) + # target_column_name: str = Field( + # description="Name of the column containing the target values." + # ) test_set_percentage: float = Field( default=20.0, ge=1, @@ -55,6 +56,6 @@ class OutputModel(BaseModel): model_file_path: str = Field( description="Path to the file containing the trained model." ) - results_figure_file_path: str = Field( - description="Path to the file containing the results figure." - ) + # results_figure_file_path: str = Field( + # description="Path to the file containing the results figure." + # ) diff --git a/pieces/ProphetTrainModelPiece/piece.py b/pieces/ProphetTrainModelPiece/piece.py index 84f59c1..7c83608 100644 --- a/pieces/ProphetTrainModelPiece/piece.py +++ b/pieces/ProphetTrainModelPiece/piece.py @@ -2,9 +2,8 @@ from .models import InputModel, OutputModel import pandas as pd from prophet import Prophet -from prophet.serialize import model_to_json -import plotly.graph_objs as go - +import pickle +from pathlib import Path class ProphetTrainModelPiece(BasePiece): """ @@ -21,9 +20,14 @@ def piece_function(self, input_data: InputModel): else: raise ValueError("File format not supported. Please pass a CSV or JSON file.") + model = Prophet() + model.fit(df) + # Serialize model + model_file_path = self.results_path / "prophet_model.json" + with open(str(model_file_path), "wb") as f: + pickle.dump(model, f) return OutputModel( - model_file_path=, - results_figure_file_path= + model_file_path=str(model_file_path), ) \ No newline at end of file From 1f679c80626cc56a25852e0a08f95f1282f9b6b0 Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 14:02:48 -0300 Subject: [PATCH 03/15] tests --- .../test_example_simple_piece.py | 15 --------------- .../test_prophet_predict_piece.py | 4 ++++ .../test_example_simple_piece.py | 15 --------------- .../test_prophet_train_model_piece.py | 4 ++++ 4 files changed, 8 insertions(+), 30 deletions(-) delete mode 100644 pieces/ProphetPredictPiece/test_example_simple_piece.py create mode 100644 pieces/ProphetPredictPiece/test_prophet_predict_piece.py delete mode 100644 pieces/ProphetTrainModelPiece/test_example_simple_piece.py create mode 100644 pieces/ProphetTrainModelPiece/test_prophet_train_model_piece.py diff --git a/pieces/ProphetPredictPiece/test_example_simple_piece.py b/pieces/ProphetPredictPiece/test_example_simple_piece.py deleted file mode 100644 index 168a611..0000000 --- a/pieces/ProphetPredictPiece/test_example_simple_piece.py +++ /dev/null @@ -1,15 +0,0 @@ -from domino.testing import piece_dry_run - -def test_example_simple_piece(): - input_data = dict( - distribution_name="gaussian", - distribution_mean=0., - distribution_sd=1. - ) - output_data = piece_dry_run( - "ExampleSimplePiece", - input_data - ) - - assert output_data["message"] is not None - assert output_data["sample_result"] is not None \ No newline at end of file diff --git a/pieces/ProphetPredictPiece/test_prophet_predict_piece.py b/pieces/ProphetPredictPiece/test_prophet_predict_piece.py new file mode 100644 index 0000000..af4c3dd --- /dev/null +++ b/pieces/ProphetPredictPiece/test_prophet_predict_piece.py @@ -0,0 +1,4 @@ +from domino.testing import piece_dry_run + +def test_prophet_predict_piece(): + ... 
\ No newline at end of file diff --git a/pieces/ProphetTrainModelPiece/test_example_simple_piece.py b/pieces/ProphetTrainModelPiece/test_example_simple_piece.py deleted file mode 100644 index 168a611..0000000 --- a/pieces/ProphetTrainModelPiece/test_example_simple_piece.py +++ /dev/null @@ -1,15 +0,0 @@ -from domino.testing import piece_dry_run - -def test_example_simple_piece(): - input_data = dict( - distribution_name="gaussian", - distribution_mean=0., - distribution_sd=1. - ) - output_data = piece_dry_run( - "ExampleSimplePiece", - input_data - ) - - assert output_data["message"] is not None - assert output_data["sample_result"] is not None \ No newline at end of file diff --git a/pieces/ProphetTrainModelPiece/test_prophet_train_model_piece.py b/pieces/ProphetTrainModelPiece/test_prophet_train_model_piece.py new file mode 100644 index 0000000..bf0946c --- /dev/null +++ b/pieces/ProphetTrainModelPiece/test_prophet_train_model_piece.py @@ -0,0 +1,4 @@ +from domino.testing import piece_dry_run + +def test_prophet_train_model_piece(): + ... \ No newline at end of file From f35385024d1e8516e3447e411729a7b9be428b2b Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 14:09:30 -0300 Subject: [PATCH 04/15] resources --- pieces/GetYahooFinanceDataPiece/metadata.json | 8 ++++---- pieces/ProphetPredictPiece/metadata.json | 8 ++++---- pieces/ProphetTrainModelPiece/metadata.json | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/pieces/GetYahooFinanceDataPiece/metadata.json b/pieces/GetYahooFinanceDataPiece/metadata.json index 0efc7b6..e1bdd23 100644 --- a/pieces/GetYahooFinanceDataPiece/metadata.json +++ b/pieces/GetYahooFinanceDataPiece/metadata.json @@ -6,12 +6,12 @@ }, "container_resources": { "requests": { - "cpu": "100m", - "memory": "128Mi" + "cpu": 100, + "memory": 128 }, "limits": { - "cpu": "500m", - "memory": "512Mi" + "cpu": 500, + "memory": 512 } }, "tags": [ diff --git a/pieces/ProphetPredictPiece/metadata.json b/pieces/ProphetPredictPiece/metadata.json index 0a9b922..ebe176c 100644 --- a/pieces/ProphetPredictPiece/metadata.json +++ b/pieces/ProphetPredictPiece/metadata.json @@ -6,12 +6,12 @@ }, "container_resources": { "requests": { - "cpu": "100m", - "memory": "128Mi" + "cpu": 100, + "memory": 128 }, "limits": { - "cpu": "500m", - "memory": "512Mi" + "cpu": 500, + "memory": 512 } }, "tags": [ diff --git a/pieces/ProphetTrainModelPiece/metadata.json b/pieces/ProphetTrainModelPiece/metadata.json index f347465..fc3b5c3 100644 --- a/pieces/ProphetTrainModelPiece/metadata.json +++ b/pieces/ProphetTrainModelPiece/metadata.json @@ -6,12 +6,12 @@ }, "container_resources": { "requests": { - "cpu": "100m", - "memory": "128Mi" + "cpu": 100, + "memory": 128 }, "limits": { - "cpu": "500m", - "memory": "512Mi" + "cpu": 500, + "memory": 512 } }, "tags": [ From db9724944829d786ba636c794529b4ab9791bec6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 7 May 2024 17:11:27 +0000 Subject: [PATCH 05/15] auto-organize --- .domino/compiled_metadata.json | 282 ++++++++++++++++++++++++++------- .domino/dependencies_map.json | 10 +- 2 files changed, 231 insertions(+), 61 deletions(-) diff --git a/.domino/compiled_metadata.json b/.domino/compiled_metadata.json index 397630c..6075446 100644 --- a/.domino/compiled_metadata.json +++ b/.domino/compiled_metadata.json @@ -1,110 +1,280 @@ { - "ExampleSimplePiece": { - "name": "ExampleSimplePiece", + "ProphetTrainModelPiece": { + "name": "ProphetTrainModelPiece", "dependency": { "dockerfile": null, "requirements_file": 
"requirements_0.txt" }, "tags": [ - "Example" + "Prophet" ], "style": { - "node_label": "Simple Piece", + "node_label": "Prophet Train", "node_type": "default", "node_style": { "backgroundColor": "#ebebeb" }, "useIcon": true, - "icon_class_name": "fas fa-database", + "icon_class_name": "fa-solid:database", "iconStyle": { "cursor": "pointer" } }, - "description": "This is an example of a simple Domino Piece", + "description": "Piece to train a prophet model", "container_resources": { "requests": { - "cpu": "100m", - "memory": "128Mi" + "cpu": 100, + "memory": 128 }, "limits": { - "cpu": "500m", - "memory": "512Mi" + "cpu": 500, + "memory": 512 } }, "input_schema": { - "title": "InputModel", - "type": "object", + "$defs": { + "GrowthTrend": { + "enum": [ + "linear", + "logistic", + "flat" + ], + "title": "GrowthTrend", + "type": "string" + }, + "SeasonalityMode": { + "enum": [ + "additive", + "multiplicative" + ], + "title": "SeasonalityMode", + "type": "string" + } + }, "properties": { - "distribution_name": { - "description": "Name of the distribution to sample from", + "input_data_file": { + "description": "Path to the input data file. Accepted formats: `.csv`, `.json`. Should use the following format: `ds` (datetime), `y` (target).", + "title": "Input Data File", + "type": "string" + }, + "test_set_percentage": { + "default": 20.0, + "description": "Percentage of the data to use as test set. Default is 20%.", + "maximum": 90.0, + "minimum": 1.0, + "title": "Test Set Percentage", + "type": "number" + }, + "growth_trend": { "allOf": [ { - "$ref": "#/definitions/DistributionType" + "$ref": "#/$defs/GrowthTrend" } - ] + ], + "default": "linear", + "description": "The growth trend of the data. Options are `linear`, `logistic` and `flat`. Default is `linear`." }, - "distribution_mean": { - "title": "Distribution Mean", - "description": "Distribution mean", - "type": "number" + "changepoints": { + "default": [], + "description": "List of dates at which to include potential changepoints. If not specified, potential changepoints are selected automatically.", + "items": { + "format": "date", + "type": "string" + }, + "title": "Changepoints", + "type": "array" }, - "distribution_sd": { - "title": "Distribution Sd", - "description": "Distribution standard deviation", - "default": 1.0, - "exclusiveMinimum": 0.0, - "type": "number" + "n_changepoints": { + "default": 25, + "description": " Number of potential changepoints to include. Not used if input `changepoints` is supplied.", + "maximum": 1000, + "minimum": 0, + "title": "N Changepoints", + "type": "integer" + }, + "seasonality_mode": { + "allOf": [ + { + "$ref": "#/$defs/SeasonalityMode" + } + ], + "default": "additive", + "description": "The seasonality mode of the data. Options are `additive` and `multiplicative`. Default is `additive`." 
} }, "required": [ - "distribution_name", - "distribution_mean" + "input_data_file" ], - "definitions": { - "DistributionType": { - "title": "DistributionType", - "description": "An enumeration.", - "enum": [ - "gaussian", - "poisson" - ], + "title": "InputModel", + "type": "object" + }, + "output_schema": { + "properties": { + "model_file_path": { + "description": "Path to the file containing the trained model.", + "title": "Model File Path", "type": "string" } + }, + "required": [ + "model_file_path" + ], + "title": "OutputModel", + "type": "object" + }, + "secrets_schema": null, + "source_url": "https://github.com/Tauffer-Consulting/timeseries_domino_pieces/tree/main/pieces/ProphetTrainModelPiece" + }, + "GetYahooFinanceDataPiece": { + "name": "GetYahooFinanceDataPiece", + "dependency": { + "dockerfile": null, + "requirements_file": "requirements_0.txt" + }, + "tags": [ + "Example" + ], + "style": { + "node_label": "Get Yahoo Finance Data", + "node_type": "default", + "node_style": { + "backgroundColor": "#ebebeb" + }, + "useIcon": true, + "icon_class_name": "fa-solid:database", + "iconStyle": { + "cursor": "pointer" } }, + "description": "This piece gets data from Yahoo Finance.", + "container_resources": { + "requests": { + "cpu": 100, + "memory": 128 + }, + "limits": { + "cpu": 500, + "memory": 512 + } + }, + "input_schema": { + "properties": { + "ticker": { + "description": "Ticker of the stock to get data from.", + "title": "Ticker", + "type": "string" + }, + "start_date": { + "description": "Start date of the data to get.", + "format": "date", + "title": "Start Date", + "type": "string" + }, + "end_date": { + "description": "End date of the data to get.", + "format": "date", + "title": "End Date", + "type": "string" + } + }, + "required": [ + "ticker", + "start_date", + "end_date" + ], + "title": "InputModel", + "type": "object" + }, "output_schema": { + "properties": { + "data_path": { + "description": "Path to the file containing the trained model.", + "title": "Data Path", + "type": "string" + } + }, + "required": [ + "data_path" + ], "title": "OutputModel", - "type": "object", + "type": "object" + }, + "secrets_schema": null, + "source_url": "https://github.com/Tauffer-Consulting/timeseries_domino_pieces/tree/main/pieces/GetYahooFinanceDataPiece" + }, + "ProphetPredictPiece": { + "name": "ProphetPredictPiece", + "dependency": { + "dockerfile": null, + "requirements_file": "requirements_0.txt" + }, + "tags": [ + "Prophet" + ], + "style": { + "node_label": "Prophet Predict", + "node_type": "default", + "node_style": { + "backgroundColor": "#ebebeb" + }, + "useIcon": true, + "icon_class_name": "fa-solid:database", + "iconStyle": { + "cursor": "pointer" + } + }, + "description": "Piece to predict using Prophet", + "container_resources": { + "requests": { + "cpu": 100, + "memory": 128 + }, + "limits": { + "cpu": 500, + "memory": 512 + } + }, + "input_schema": { "properties": { - "message": { - "title": "Message", - "description": "Output message to log", + "model_path": { + "description": "Path to the file containing the trained model.", + "title": "Model Path", "type": "string" }, - "sample_result": { - "title": "Sample Result", - "description": "The result of this Piece's processing", - "type": "string" + "periods": { + "description": "Number of periods to forecast.", + "title": "Periods", + "type": "integer" } }, "required": [ - "message", - "sample_result" - ] + "model_path", + "periods" + ], + "title": "InputModel", + "type": "object" }, - "secrets_schema": { - "title": 
"SecretsModel", - "type": "object", + "output_schema": { "properties": { - "EXAMPLE_OPERATOR_SECRET_1": { - "title": "Example Operator Secret 1", - "description": "A secret necessary to run this Piece", + "forecast_data_path": { + "description": "Path to the file containing the forecast data.", + "title": "Forecast Data Path", + "type": "string" + }, + "forecast_figure_path": { + "description": "Path to the file containing the results figure.", + "title": "Forecast Figure Path", "type": "string" } }, "required": [ - "EXAMPLE_OPERATOR_SECRET_1" - ] - } + "forecast_data_path", + "forecast_figure_path" + ], + "title": "OutputModel", + "type": "object" + }, + "secrets_schema": null, + "source_url": "https://github.com/Tauffer-Consulting/timeseries_domino_pieces/tree/main/pieces/ProphetPredictPiece" } } \ No newline at end of file diff --git a/.domino/dependencies_map.json b/.domino/dependencies_map.json index 493dfb9..03e0f3d 100644 --- a/.domino/dependencies_map.json +++ b/.domino/dependencies_map.json @@ -5,11 +5,11 @@ "requirements_file": "requirements_0.txt" }, "pieces": [ - "ExampleSimplePiece" + "ProphetTrainModelPiece", + "GetYahooFinanceDataPiece", + "ProphetPredictPiece" ], - "secrets": [ - "EXAMPLE_OPERATOR_SECRET_1" - ], - "source_image": "ghcr.io/enter-your-registry-name-here/enter-your-repository-name-here:0.1.0-group0" + "secrets": [], + "source_image": "ghcr.io/enter-your-registry-name-here/enter-your-repository-name-here:development-group0" } } \ No newline at end of file From 0730cc89518544271487716c219ee7e3137632a2 Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 14:16:07 -0300 Subject: [PATCH 06/15] fix config --- config.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/config.toml b/config.toml index b210a16..97b512b 100644 --- a/config.toml +++ b/config.toml @@ -1,11 +1,11 @@ [repository] # The name of the github organization / person owner, e.g. tauffer-consulting. # Must be in lower-case letters -REGISTRY_NAME = "enter-your-registry-name-here" +REGISTRY_NAME = "tauffer-consulting" # The name of this Pieces repository -REPOSITORY_NAME = "enter-your-repository-name-here" -REPOSITORY_LABEL = "enter-your-repository-label-here" +REPOSITORY_NAME = "timeseries_domino_pieces" +REPOSITORY_LABEL = "Time Series Domino Pieces" # The version of this Pieces release # Attention: changing this will create a new release From 9cdd2507c81a83a356c07662c8b864fed81bb8a8 Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 14:32:12 -0300 Subject: [PATCH 07/15] fix --- pieces/ProphetPredictPiece/models.py | 4 ++-- pieces/ProphetPredictPiece/piece.py | 2 +- pieces/ProphetTrainModelPiece/models.py | 3 ++- pieces/ProphetTrainModelPiece/piece.py | 4 ++-- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/pieces/ProphetPredictPiece/models.py b/pieces/ProphetPredictPiece/models.py index fb150b5..bf84de0 100644 --- a/pieces/ProphetPredictPiece/models.py +++ b/pieces/ProphetPredictPiece/models.py @@ -5,8 +5,8 @@ class InputModel(BaseModel): - model_path: str = Field( - title="Model Path", + prophet_model_path: str = Field( + title="Prophet Model Path", description="Path to the file containing the trained model." 
) periods: int = Field( diff --git a/pieces/ProphetPredictPiece/piece.py b/pieces/ProphetPredictPiece/piece.py index f5d50e5..6aa4b23 100644 --- a/pieces/ProphetPredictPiece/piece.py +++ b/pieces/ProphetPredictPiece/piece.py @@ -14,7 +14,7 @@ class ProphetPredictPiece(BasePiece): """ def piece_function(self, input_data: InputModel): - with open(input_data.model_path, "rb") as f: + with open(input_data.prophet_model_path, "rb") as f: model = pickle.load(f) future = model.make_future_dataframe(periods=input_data.periods) diff --git a/pieces/ProphetTrainModelPiece/models.py b/pieces/ProphetTrainModelPiece/models.py index b1abb87..ccb586e 100644 --- a/pieces/ProphetTrainModelPiece/models.py +++ b/pieces/ProphetTrainModelPiece/models.py @@ -53,7 +53,8 @@ class InputModel(BaseModel): class OutputModel(BaseModel): - model_file_path: str = Field( + prophet_model_file_path: str = Field( + title='Prophet model path', description="Path to the file containing the trained model." ) # results_figure_file_path: str = Field( diff --git a/pieces/ProphetTrainModelPiece/piece.py b/pieces/ProphetTrainModelPiece/piece.py index 7c83608..6af429b 100644 --- a/pieces/ProphetTrainModelPiece/piece.py +++ b/pieces/ProphetTrainModelPiece/piece.py @@ -24,10 +24,10 @@ def piece_function(self, input_data: InputModel): model.fit(df) # Serialize model - model_file_path = self.results_path / "prophet_model.json" + model_file_path = Path(self.results_path) / "prophet_model.json" with open(str(model_file_path), "wb") as f: pickle.dump(model, f) return OutputModel( - model_file_path=str(model_file_path), + prophet_model_file_path=str(model_file_path), ) \ No newline at end of file From 3934828dc8cafad93bc1e7938f87097c91dbdcb2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 7 May 2024 17:34:25 +0000 Subject: [PATCH 08/15] auto-organize --- .domino/compiled_metadata.json | 12 ++++++------ .domino/dependencies_map.json | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.domino/compiled_metadata.json b/.domino/compiled_metadata.json index 6075446..279ba01 100644 --- a/.domino/compiled_metadata.json +++ b/.domino/compiled_metadata.json @@ -110,14 +110,14 @@ }, "output_schema": { "properties": { - "model_file_path": { + "prophet_model_file_path": { "description": "Path to the file containing the trained model.", - "title": "Model File Path", + "title": "Prophet model path", "type": "string" } }, "required": [ - "model_file_path" + "prophet_model_file_path" ], "title": "OutputModel", "type": "object" @@ -236,9 +236,9 @@ }, "input_schema": { "properties": { - "model_path": { + "prophet_model_path": { "description": "Path to the file containing the trained model.", - "title": "Model Path", + "title": "Prophet Model Path", "type": "string" }, "periods": { @@ -248,7 +248,7 @@ } }, "required": [ - "model_path", + "prophet_model_path", "periods" ], "title": "InputModel", diff --git a/.domino/dependencies_map.json b/.domino/dependencies_map.json index 03e0f3d..2f3484c 100644 --- a/.domino/dependencies_map.json +++ b/.domino/dependencies_map.json @@ -10,6 +10,6 @@ "ProphetPredictPiece" ], "secrets": [], - "source_image": "ghcr.io/enter-your-registry-name-here/enter-your-repository-name-here:development-group0" + "source_image": "ghcr.io/tauffer-consulting/timeseries_domino_pieces:development-group0" } } \ No newline at end of file From 6a4d24d1aa443acdcef85493d42c3b048025413e Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 14:41:50 -0300 Subject: [PATCH 09/15] update icons --- 
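Note on the field renames earlier in this series: `model_path` and `model_file_path` were changed to `prophet_model_path` and `prophet_model_file_path` in PATCH 07, most likely because pydantic v2 treats the `model_` prefix as a protected namespace and warns about such field names. A minimal alternative sketch, assuming pydantic v2, that would keep the original field name by relaxing the protected namespace instead:

    from pydantic import BaseModel, ConfigDict, Field

    class OutputModel(BaseModel):
        # Hypothetical alternative: silence the `model_` namespace warning
        # instead of renaming the field.
        model_config = ConfigDict(protected_namespaces=())
        model_file_path: str = Field(
            description="Path to the file containing the trained model."
        )
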
pieces/ProphetPredictPiece/metadata.json | 2 +- pieces/ProphetTrainModelPiece/metadata.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pieces/ProphetPredictPiece/metadata.json b/pieces/ProphetPredictPiece/metadata.json index ebe176c..132a362 100644 --- a/pieces/ProphetPredictPiece/metadata.json +++ b/pieces/ProphetPredictPiece/metadata.json @@ -19,6 +19,6 @@ ], "style": { "node_label": "Prophet Predict", - "icon_class_name": "fa-solid:database" + "icon_class_name": "icon-park-outline:robot-one" } } \ No newline at end of file diff --git a/pieces/ProphetTrainModelPiece/metadata.json b/pieces/ProphetTrainModelPiece/metadata.json index fc3b5c3..ba816ca 100644 --- a/pieces/ProphetTrainModelPiece/metadata.json +++ b/pieces/ProphetTrainModelPiece/metadata.json @@ -19,6 +19,6 @@ ], "style": { "node_label": "Prophet Train", - "icon_class_name": "fa-solid:database" + "icon_class_name": "icon-park-outline:robot-one" } } \ No newline at end of file From af2b2fb3438d5c85a992f28cb85de6d154012124 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 7 May 2024 17:44:01 +0000 Subject: [PATCH 10/15] auto-organize --- .domino/compiled_metadata.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.domino/compiled_metadata.json b/.domino/compiled_metadata.json index 279ba01..728b9db 100644 --- a/.domino/compiled_metadata.json +++ b/.domino/compiled_metadata.json @@ -15,7 +15,7 @@ "backgroundColor": "#ebebeb" }, "useIcon": true, - "icon_class_name": "fa-solid:database", + "icon_class_name": "icon-park-outline:robot-one", "iconStyle": { "cursor": "pointer" } @@ -218,7 +218,7 @@ "backgroundColor": "#ebebeb" }, "useIcon": true, - "icon_class_name": "fa-solid:database", + "icon_class_name": "icon-park-outline:robot-one", "iconStyle": { "cursor": "pointer" } From e2db72376b0dd538518497aedfedc42061785da5 Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 15:08:18 -0300 Subject: [PATCH 11/15] using model arguments --- pieces/ProphetTrainModelPiece/models.py | 12 ------------ pieces/ProphetTrainModelPiece/piece.py | 7 ++++++- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/pieces/ProphetTrainModelPiece/models.py b/pieces/ProphetTrainModelPiece/models.py index ccb586e..177d17e 100644 --- a/pieces/ProphetTrainModelPiece/models.py +++ b/pieces/ProphetTrainModelPiece/models.py @@ -20,18 +20,6 @@ class InputModel(BaseModel): title="Input Data File", description="Path to the input data file. Accepted formats: `.csv`, `.json`. Should use the following format: `ds` (datetime), `y` (target).", ) - # datetime_column_name: str = Field( - # description="Name of the column containing the datetime values." - # ) - # target_column_name: str = Field( - # description="Name of the column containing the target values." - # ) - test_set_percentage: float = Field( - default=20.0, - ge=1, - le=90, - description="Percentage of the data to use as test set. Default is 20%." - ) growth_trend: GrowthTrend = Field( default=GrowthTrend.linear, description="The growth trend of the data. Options are `linear`, `logistic` and `flat`. Default is `linear`." diff --git a/pieces/ProphetTrainModelPiece/piece.py b/pieces/ProphetTrainModelPiece/piece.py index 6af429b..c08efea 100644 --- a/pieces/ProphetTrainModelPiece/piece.py +++ b/pieces/ProphetTrainModelPiece/piece.py @@ -20,7 +20,12 @@ def piece_function(self, input_data: InputModel): else: raise ValueError("File format not supported. 
Please pass a CSV or JSON file.") - model = Prophet() + model = Prophet( + seasonality_mode=input_data.seasonality_mode, + growth=input_data.growth_trend, + changepoints=input_data.changepoints, + n_changepoints=input_data.n_changepoints + ) model.fit(df) # Serialize model From 56790f6934635709cba20593e2c3c88bf5d1e466 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 7 May 2024 18:11:08 +0000 Subject: [PATCH 12/15] auto-organize --- .domino/compiled_metadata.json | 8 -------- 1 file changed, 8 deletions(-) diff --git a/.domino/compiled_metadata.json b/.domino/compiled_metadata.json index 728b9db..b2087ea 100644 --- a/.domino/compiled_metadata.json +++ b/.domino/compiled_metadata.json @@ -57,14 +57,6 @@ "title": "Input Data File", "type": "string" }, - "test_set_percentage": { - "default": 20.0, - "description": "Percentage of the data to use as test set. Default is 20%.", - "maximum": 90.0, - "minimum": 1.0, - "title": "Test Set Percentage", - "type": "number" - }, "growth_trend": { "allOf": [ { From b5e763b514629019eccc9fb2551df11bf4688377 Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 15:17:40 -0300 Subject: [PATCH 13/15] default changepoints none --- pieces/ProphetTrainModelPiece/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pieces/ProphetTrainModelPiece/models.py b/pieces/ProphetTrainModelPiece/models.py index 177d17e..c642cf5 100644 --- a/pieces/ProphetTrainModelPiece/models.py +++ b/pieces/ProphetTrainModelPiece/models.py @@ -25,7 +25,7 @@ class InputModel(BaseModel): description="The growth trend of the data. Options are `linear`, `logistic` and `flat`. Default is `linear`." ) changepoints: List[date] = Field( - default=[], + default=None, description="List of dates at which to include potential changepoints. If not specified, potential changepoints are selected automatically." ) n_changepoints: int = Field( From 5430850411f51a5c83fba9b3cf1b3da276af8245 Mon Sep 17 00:00:00 2001 From: vinicvaz Date: Tue, 7 May 2024 15:18:04 -0300 Subject: [PATCH 14/15] update type --- pieces/ProphetTrainModelPiece/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pieces/ProphetTrainModelPiece/models.py b/pieces/ProphetTrainModelPiece/models.py index c642cf5..e84a0f2 100644 --- a/pieces/ProphetTrainModelPiece/models.py +++ b/pieces/ProphetTrainModelPiece/models.py @@ -1,6 +1,6 @@ from pydantic import BaseModel, Field from enum import Enum -from typing import List +from typing import List, Optional from datetime import date @@ -24,7 +24,7 @@ class InputModel(BaseModel): default=GrowthTrend.linear, description="The growth trend of the data. Options are `linear`, `logistic` and `flat`. Default is `linear`." ) - changepoints: List[date] = Field( + changepoints: Optional[List[date]] = Field( default=None, description="List of dates at which to include potential changepoints. If not specified, potential changepoints are selected automatically." ) From 0997ca7ad570c20c1853a52a9ea877af70f8bddc Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 7 May 2024 18:19:41 +0000 Subject: [PATCH 15/15] auto-organize --- .domino/compiled_metadata.json | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/.domino/compiled_metadata.json b/.domino/compiled_metadata.json index b2087ea..690f8cc 100644 --- a/.domino/compiled_metadata.json +++ b/.domino/compiled_metadata.json @@ -67,14 +67,21 @@ "description": "The growth trend of the data. Options are `linear`, `logistic` and `flat`. 
Default is `linear`." }, "changepoints": { - "default": [], + "anyOf": [ + { + "items": { + "format": "date", + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "default": null, "description": "List of dates at which to include potential changepoints. If not specified, potential changepoints are selected automatically.", - "items": { - "format": "date", - "type": "string" - }, - "title": "Changepoints", - "type": "array" + "title": "Changepoints" }, "n_changepoints": { "default": 25,
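
The test modules added in PATCH 03 are left as stubs (`...`). Below is a minimal sketch of how they might be filled in, assuming `piece_dry_run` behaves as in the removed `test_example_simple_piece.py` (piece name plus an input dict, returning the outputs as a dict) and that pytest's `tmp_path` fixture is available; the sample data, column values, and `periods=7` are illustrative only:

    from domino.testing import piece_dry_run
    import pandas as pd


    def test_prophet_train_and_predict_pieces(tmp_path):
        # Small synthetic series in the ds/y layout ProphetTrainModelPiece expects.
        df = pd.DataFrame({
            "ds": pd.date_range("2023-01-01", periods=60, freq="D"),
            "y": [float(i) for i in range(60)],
        })
        input_file = tmp_path / "train.csv"
        df.to_csv(input_file, index=False)

        # Train a model and check that a serialized model file is reported.
        train_output = piece_dry_run(
            "ProphetTrainModelPiece",
            dict(
                input_data_file=str(input_file),
                growth_trend="linear",
                n_changepoints=5,
                seasonality_mode="additive",
            ),
        )
        assert train_output["prophet_model_file_path"] is not None

        # Feed the trained model into the predict piece.
        predict_output = piece_dry_run(
            "ProphetPredictPiece",
            dict(
                prophet_model_path=train_output["prophet_model_file_path"],
                periods=7,
            ),
        )
        assert predict_output["forecast_data_path"] is not None
        assert predict_output["forecast_figure_path"] is not None

Note that GetYahooFinanceDataPiece writes the raw yfinance columns (Date, Open, High, Low, Close, ...), while ProphetTrainModelPiece expects `ds`/`y`, so some upstream step would need a rename such as df.rename(columns={"Date": "ds", "Close": "y"}) before the CSV is passed on.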