Change default action for invalid target data check #4131
First file in the diff:

@@ -16,7 +16,6 @@
     DataChecks,
     DataCheckWarning,
     DateTimeFormatDataCheck,
-    DCAOParameterType,
     DefaultDataChecks,
     InvalidTargetDataCheck,
     TargetDistributionDataCheck,

@@ -27,7 +26,6 @@
 from evalml.problem_types import (
     ProblemTypes,
     is_classification,
-    is_regression,
     is_time_series,
 )
@@ -229,24 +227,13 @@ def get_expected_messages(problem_type):
             message="1 row(s) (20.0%) of target values are null",
             data_check_name="InvalidTargetDataCheck",
             message_code=DataCheckMessageCode.TARGET_HAS_NULL,
-            details={"num_null_rows": 1, "pct_null_rows": 20.0},
+            details={"num_null_rows": 1, "pct_null_rows": 20.0, "rows": [2]},
             action_options=[
                 DataCheckActionOption(
-                    DataCheckActionCode.IMPUTE_COL,
+                    DataCheckActionCode.DROP_ROWS,
                     data_check_name="InvalidTargetDataCheck",
-                    parameters={
-                        "impute_strategy": {
-                            "parameter_type": DCAOParameterType.GLOBAL,
-                            "type": "category",
-                            "categories": ["mean", "most_frequent"]
-                            if is_regression(problem_type)
-                            else ["most_frequent"],
-                            "default_value": "mean"
-                            if is_regression(problem_type)
-                            else "most_frequent",
-                        },
-                    },
-                    metadata={"is_target": True},
+                    parameters={},
+                    metadata={"is_target": True, "rows": [2]},
                 ),
             ],
         ).to_dict(),

Review comment on this hunk: Why are we removing all the params?
Reply: This is testing the default action, which is now drop rows, and the drop rows action doesn't have any of the parameters required by the impute column action.
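To make the contrast in that exchange concrete, here is a minimal sketch (not code from this PR) of how the two strategies could be compared. Only the constructor arguments, the null_strategy keyword, and the validate(None, y) call are taken from the diff; the exact structure of the returned messages may differ between evalml versions, so the sketch just prints whatever validate returns.

import numpy as np
import pandas as pd
from evalml.data_checks import InvalidTargetDataCheck

# A binary target with one null value (20% of 5 rows), matching the expected messages above.
y = pd.Series([0, 1, np.nan, 1, 0])

# New default: the suggested action is DROP_ROWS and carries no parameters.
default_check = InvalidTargetDataCheck("binary", "Log Loss Binary")
print(default_check.validate(None, y))

# Opting back into the old behavior: the suggested action is IMPUTE_COL with an
# impute_strategy parameter (null_strategy is the keyword added in this PR).
impute_check = InvalidTargetDataCheck("binary", "Log Loss Binary", null_strategy="impute")
print(impute_check.validate(None, y))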
@@ -475,20 +462,13 @@ def __eq__(self, series_2):
             message="1 row(s) (20.0%) of target values are null",
             data_check_name="InvalidTargetDataCheck",
             message_code=DataCheckMessageCode.TARGET_HAS_NULL,
-            details={"num_null_rows": 1, "pct_null_rows": 20.0},
+            details={"num_null_rows": 1, "pct_null_rows": 20.0, "rows": [2]},
             action_options=[
                 DataCheckActionOption(
-                    DataCheckActionCode.IMPUTE_COL,
+                    DataCheckActionCode.DROP_ROWS,
                     data_check_name="InvalidTargetDataCheck",
-                    parameters={
-                        "impute_strategy": {
-                            "parameter_type": DCAOParameterType.GLOBAL,
-                            "type": "category",
-                            "categories": ["mean", "most_frequent"],
-                            "default_value": "mean",
-                        },
-                    },
-                    metadata={"is_target": True},
+                    parameters={},
+                    metadata={"is_target": True, "rows": [2]},
                 ),
             ],
         ).to_dict(),
Second file in the diff:

@@ -242,7 +242,7 @@ def test_data_checks_impute_cols(problem_type):
         pd.Series([0, 0.1, 0.2, 0.1, 0.1]),
         logical_type="double",
     )
-    data_check = InvalidTargetDataCheck(problem_type, objective)
+    data_check = InvalidTargetDataCheck(problem_type, objective, null_strategy="impute")
     data_checks_output = data_check.validate(None, y)

     action_pipeline = make_pipeline_from_data_check_output(
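Hunks like this one show the migration for callers that relied on the old default: imputation now has to be requested explicitly. A hypothetical before/after sketch; only the null_strategy keyword is taken from the diff, the problem type and objective are illustrative.

from evalml.data_checks import InvalidTargetDataCheck

# Before this PR, imputing the target was the default suggestion, so this was enough:
#     data_check = InvalidTargetDataCheck("regression", "R2")
# After this PR, the same call suggests dropping the null rows instead, and the old
# behavior is requested explicitly via the new keyword:
data_check = InvalidTargetDataCheck("regression", "R2", null_strategy="impute")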
@@ -297,3 +297,24 @@ def test_data_checks_suggests_drop_rows():
     X_t, y_t = action_pipeline.transform(X, y)
     assert_frame_equal(X_expected, X_t)
     assert_series_equal(y_expected, y_t)
+
+    y = pd.Series(np.concatenate([np.tile([0, 1], 49), [np.nan, np.nan]]))
+
+    data_check = InvalidTargetDataCheck("binary", "Log Loss Binary")
+    data_checks_output = data_check.validate(None, y)
+
+    action_pipeline = make_pipeline_from_data_check_output("binary", data_checks_output)
+    assert action_pipeline == BinaryClassificationPipeline(
+        component_graph={"Drop Rows Transformer": [DropRowsTransformer, "X", "y"]},
+        parameters={"Drop Rows Transformer": {"indices_to_drop": [98, 99]}},
+        random_seed=0,
+    )
+
+    X_expected = X.drop([98, 99])
+    X_expected.ww.init()
+    y_expected = y.drop([98, 99]).astype("Int64")
+
+    action_pipeline.fit(X, y)
+    X_t, y_t = action_pipeline.transform(X, y)
+    assert_frame_equal(X_expected, X_t)
+    assert_series_equal(y_expected, y_t)

Review comment on this hunk: Will be interesting to see how we fit these components or "datacheck pipelines" into our search pipelines!
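For intuition, here is a rough pandas-only equivalent (not evalml code) of what the suggested drop-rows action ends up doing in this test: the two rows with a null target, indices 98 and 99, are removed from both X and y, and y is cast to a nullable integer dtype. The feature column is a stand-in; the real test reuses the X defined earlier in the function.

import numpy as np
import pandas as pd

X = pd.DataFrame({"feature": range(100)})        # stand-in for the X used in the test
y = pd.Series(np.concatenate([np.tile([0, 1], 49), [np.nan, np.nan]]))

null_rows = y[y.isnull()].index                  # Index([98, 99]) for this target
X_clean = X.drop(index=null_rows)
y_clean = y.drop(index=null_rows).astype("Int64")  # matches the test's expected dtype

print(list(null_rows), len(X_clean), len(y_clean))  # [98, 99] 98 98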
Review comment: It would be nice to keep the default behavior in the docstring. Can you file an issue for it?
Reply: For sure!
Reply: Filed here.
Reply: Cheers!