diff --git a/tests/cpp/test_params.cpp b/tests/cpp/test_brush.cpp
similarity index 100%
rename from tests/cpp/test_params.cpp
rename to tests/cpp/test_brush.cpp
diff --git a/tests/cpp/test_data.cpp b/tests/cpp/test_data.cpp
index af0678f9..be8c0a9e 100644
--- a/tests/cpp/test_data.cpp
+++ b/tests/cpp/test_data.cpp
@@ -1,126 +1,121 @@
-// #include "testsHeader.h"
-// #include "../../src/program/program.h"
-// #include "../../src/search_space.h"
-// #include "../../src/program/dispatch_table.h"
-
-// TEST(Data, ErrorHandling)
-// {
-//     // Creating an empty dataset throws error
-//     EXPECT_THROW({
-//         MatrixXf X(0,0);
-//         ArrayXf y(0);
-
-//         try
-//         {
-//             Dataset dt(X, y);
-//         }
-//         catch( const std::runtime_error& err )
-//         {
-//             const string msg = err.what();
-//             ASSERT_NE(
-//                 msg.find("Error during the initialization of the dataset"),
-//                 std::string::npos);
-//             throw;
-//         }
-//     }, std::runtime_error);
-// }
-
-// TEST(Data, MixedVariableTypes)
-// {
-//     // We need to set at least the mutation options (and respective
-//     // probabilities) in order to call PRG.predict()
-//     PARAMS["write_mutation_trace"] = true;
-//     PARAMS["mutation_options"] = {
-//         {"point",0.167}, {"insert", 0.167}, {"delete", 0.167}, {"subtree", 0.167}, {"toggle_weight_on", 0.167}, {"toggle_weight_off", 0.167}
-//     };
-
-//     MatrixXf X(5,3);
-//     X << 0  , 1, 0  ,       // binary with integer values
-//          0.0, 1.0, 1.0,     // binary with float values
-//          2  , 1.0, -3.0,    // integer with float and negative values
-//          2  , 1  , 3  ,     // integer with integer values
-//          2.1, 3.7, -5.2;    // float values
-
-//     X.transposeInPlace();
-
-//     ArrayXf y(3);
-
-//     y << 6.1, 7.7, -4.2;    // y = x_0 + x_1 + x_2
+#include "testsHeader.h"
+
+
+TEST(Data, ErrorHandling)
+{
+    // Creating an empty dataset throws error
+    EXPECT_THROW({
+        MatrixXf X(0,0);
+        ArrayXf y(0);
+
+        try
+        {
+            Dataset dt(X, y);
+        }
+        catch( const std::runtime_error& err )
+        {
+            const string msg = err.what();
+            ASSERT_NE(
+                msg.find("Error during the initialization of the dataset"),
+                std::string::npos);
+            throw;
+        }
+    }, std::runtime_error);
+}
+
+TEST(Data, MixedVariableTypes)
+{
+    Parameters params;
+
+    MatrixXf X(5,3);
+    X << 0  , 1, 0  ,       // binary with integer values
+         0.0, 1.0, 1.0,     // binary with float values
+         2  , 1.0, -3.0,    // integer with float and negative values
+         2  , 1  , 3  ,     // integer with integer values
+         2.1, 3.7, -5.2;    // float values
+
+    X.transposeInPlace();
+
+    ArrayXf y(3);
+
+    y << 6.1, 7.7, -4.2;    // y = x_0 + x_1 + x_2
-
-//     unordered_map<string, float> user_ops = {
-//         {"Add", 0.5},
-//         {"Sub", 0.5},
-//         // a boolean operator
-//         {"And", 1.0},
-//         {"Or", 1.0},
-//         // operator that takes boolean as argument
-//         {"SplitOn", 1.0}
-//     };
-
-//     Dataset dt(X, y);
-//     SearchSpace SS;
-//     SS.init(dt, user_ops);
-
-//     dt.print();
-//     SS.print();
-
-//     for (size_t d = 5; d < 10; ++d)
-//         for (size_t s = 5; s < 20; ++s)
-//         {
-//             fmt::print(
-//                 "=================================================\n"
-//                 "depth={}, size={}. ", d, s
", d, s -// ); - -// PARAMS["max_size"] = s; -// PARAMS["max_depth"] = d; - -// RegressorProgram PRG = SS.make_regressor(s-4, d-4); - -// fmt::print( -// "Tree model: {}\n", PRG.get_model("compact", true) -// ); - -// // visualizing detailed information for the model -// std::for_each(PRG.Tree.begin(), PRG.Tree.end(), -// [](const auto& n) { -// fmt::print("Name {}, node {}, feature {}\n" -// " sig_hash {}\n ret_type {}\n ret_type type {}\n", -// n.name, n.node_type, n.get_feature(), -// n.sig_hash, n.ret_type, typeid(n.ret_type).name()); -// }); - -// std::cout << std::endl; - -// fmt::print( "PRG fit\n"); -// PRG.fit(dt); -// fmt::print( "PRG predict\n"); -// ArrayXf y_pred = PRG.predict(dt); -// fmt::print( "y_pred: {}\n", y_pred); - -// // creating and fitting a child -// auto opt = PRG.mutate(); - -// if (!opt){ -// fmt::print("Mutation failed to create a child\n"); -// fmt::print("{}\n", PARAMS["mutation_trace"].get().dump()); -// } -// else { -// auto Child = opt.value(); - -// fmt::print("Child model: {}\n", Child.get_model("compact", true)); - -// fmt::print( "Child fit\n"); -// Child.fit(dt); -// fmt::print( "Child predict\n"); -// ArrayXf y_pred_child = Child.predict(dt); -// fmt::print( "y_pred: {}\n", y_pred); -// } -// } - -// // Brush exports two DispatchTable structs named dtable_fit and dtable_predict. -// // These structures holds the mapping between nodes and its corresponding -// // operations, and are used to resolve the evaluation of an expression. -// // dtable_fit.print(); -// // dtable_predict.print(); -// } \ No newline at end of file + params.functions = { + {"Add", 0.5}, + {"Sub", 0.5}, + // a boolean operator + {"And", 1.0}, + {"Or", 1.0}, + // operator that takes boolean as argument + {"SplitOn", 1.0} + }; + + Dataset dt(X, y); + SearchSpace SS; + SS.init(dt, params.functions); + + dt.print(); + SS.print(); + + for (size_t d = 5; d < 10; ++d) + for (size_t s = 5; s < 20; ++s) + { + fmt::print( + "=================================================\n" + "depth={}, size={}. ", d, s + ); + + params.max_size = s; + params.max_depth = d; + + // TODO: update all calls of make_ to use params + RegressorProgram PRG = SS.make_regressor(0, 0, params); + + fmt::print( + "Tree model: {}\n", PRG.get_model("compact", true) + ); + + // visualizing detailed information for the model + std::for_each(PRG.Tree.begin(), PRG.Tree.end(), + [](const auto& n) { + fmt::print("Name {}, node {}, feature {}\n" + " sig_hash {}\n ret_type {}\n ret_type type {}\n", + n.name, n.node_type, n.get_feature(), + n.sig_hash, n.ret_type, typeid(n.ret_type).name()); + }); + std::cout << std::endl; + + fmt::print( "PRG fit\n"); + PRG.fit(dt); + + fmt::print( "PRG predict\n"); + ArrayXf y_pred = PRG.predict(dt); + fmt::print( "y_pred: {}\n", y_pred); + + // creating and fitting a child + Variation variator = Variation(params, SS); + std::optional opt = variator.mutate(PRG); + + if (!opt){ + fmt::print("Mutation failed to create a child\n"); + } + else { + auto Child = opt.value(); + + fmt::print("Child model: {}\n", Child.get_model("compact", true)); + + fmt::print( "Child fit\n"); + Child.fit(dt); + + fmt::print( "Child predict\n"); + ArrayXf y_pred_child = Child.predict(dt); + fmt::print( "y_pred: {}\n", y_pred); + } + } + + // Brush exports two DispatchTable structs named dtable_fit and dtable_predict. + // These structures holds the mapping between nodes and its corresponding + // operations, and are used to resolve the evaluation of an expression. 
+    // dtable_fit.print();
+    // dtable_predict.print();
+}
\ No newline at end of file
diff --git a/tests/cpp/test_population.cpp b/tests/cpp/test_population.cpp
index e69de29b..68a407ee 100644
--- a/tests/cpp/test_population.cpp
+++ b/tests/cpp/test_population.cpp
@@ -0,0 +1,18 @@
+#include "testsHeader.h"
+
+TEST(Population, PopulationTests)
+{
+
+    Population pop;
+
+    // size,
+    // island sizes grow and come back to the same,
+    // update and prep offspring slots.
+    // no overlap in island indexes.
+    // works with even and uneven pop sizes.
+    // initialize population works?
+    // migrate?
+    // print models
+
+}
+
diff --git a/tests/cpp/test_program.cpp b/tests/cpp/test_program.cpp
index 2d741f68..69d5819b 100644
--- a/tests/cpp/test_program.cpp
+++ b/tests/cpp/test_program.cpp
@@ -11,12 +11,16 @@ TEST(Program, MakeRegressor)
     SearchSpace SS;
     SS.init(data);
 
+    Parameters params;
     // Program DXtree;
     for (int d = 1; d < 10; ++d)
         for (int s = 1; s < 10; ++s)
         {
-            RegressorProgram PRG = SS.make_regressor(d, s);
+            params.max_size = s;
+            params.max_depth = d;
+
+            RegressorProgram PRG = SS.make_regressor(0, 0, params);
             fmt::print(
                 "=================================================\n"
                 "Tree model for depth = {}, size= {}: {}\n",
@@ -57,17 +61,23 @@ TEST(Program, MakeRegressor)
 
 TEST(Program, FitRegressor)
 {
-    
+    Parameters params;
+
     Dataset data = Data::read_csv("docs/examples/datasets/d_enc.csv","label");
 
     SearchSpace SS;
     SS.init(data);
+
     dtable_fit.print();
     dtable_predict.print();
+
     // for (int t = 0; t < 10; ++t) {
     for (int d = 1; d < 10; ++d) {
         for (int s = 1; s < 100; s+=10) {
-            RegressorProgram PRG = SS.make_regressor(d, s);
+            params.max_size = s;
+            params.max_depth = d;
+
+            RegressorProgram PRG = SS.make_regressor(0, 0, params);
             fmt::print(
                 "=================================================\n"
                 "Tree model for depth = {}, size= {}: {}\n"
@@ -83,27 +93,36 @@ TEST(Program, FitRegressor)
 
 TEST(Program, PredictWithWeights)
 {
+    Parameters params;
 
     Dataset data = Data::read_csv("docs/examples/datasets/d_enc.csv","label");
 
     SearchSpace SS;
     SS.init(data);
+
     dtable_fit.print();
     dtable_predict.print();
+
     // for (int t = 0; t < 10; ++t) {
     for (int d = 1; d < 10; ++d) {
         for (int s = 1; s < 10; s+=10) {
-            RegressorProgram PRG = SS.make_regressor(d, s);
+            params.max_size = s;
+            params.max_depth = d;
+
+            RegressorProgram PRG = SS.make_regressor(0, 0, params);
             fmt::print(
                 "=================================================\n"
                 "Tree model for depth = {}, size= {}: {}\n"
                 "=================================================\n",
                 d, s, PRG.get_model("compact", true)
             );
+
             PRG.fit(data);
             auto y = PRG.predict(data);
+
            auto weights = PRG.get_weights();
            auto yweights = PRG.predict_with_weights(data, weights);
+
            for (int i = 0; i < y.size(); ++i){
                if (std::isnan(y(i)))
                    ASSERT_TRUE(std::isnan(y(i)));
@@ -117,6 +136,7 @@ TEST(Program, PredictWithWeights)
 
 TEST(Program, FitClassifier)
 {
+    Parameters params;
     Dataset data = Data::read_csv("docs/examples/datasets/d_analcatdata_aids.csv","target");
 
     SearchSpace SS;
@@ -124,7 +144,12 @@ TEST(Program, FitClassifier)
 
     for (int d = 1; d < 10; ++d) {
         for (int s = 1; s < 100; s+=10) {
-            auto PRG = SS.make_classifier(d, s);
+
+            params.max_size = s;
+            params.max_depth = d;
+
+            auto PRG = SS.make_classifier(0, 0, params);
+
             fmt::print(
                 "=================================================\n"
                 "Tree model for depth = {}, size= {}: {}\n"
@@ -140,6 +165,8 @@ TEST(Program, FitClassifier)
 
 TEST(Program, Serialization)
 {
+    Parameters params;
+
     // test mutation
     // TODO: set random seed
     MatrixXf X(10,2);
@@ -159,7 +186,10 @@ TEST(Program, Serialization)
     {
         for (int s = 1; s < 10; ++s)
         {
-            RegressorProgram PRG = SS.make_regressor(d, s);
+            params.max_size = s;
+            params.max_depth = d;
+
+            RegressorProgram PRG = SS.make_regressor(0, 0, params);
             fmt::print(
                 "=================================================\n"
                 "depth = {}, size= {}\n"
@@ -171,12 +201,15 @@ TEST(Program, Serialization)
             ArrayXf y_pred = PRG.predict(data);
             json PRGjson = PRG;
             fmt::print( "json of initial model: {}\n", PRGjson.dump(2));
+
             // auto newPRG = PRGjson.get<RegressorProgram>();
             RegressorProgram newPRG = PRGjson;
             json newPRGjson = newPRG;
+
             fmt::print( "json of loaded model: {}\n", newPRGjson.dump(2));
             fmt::print("Initial Model: {}\n",PRG.get_model("compact", true));
             fmt::print("Loaded Model: {}\n",newPRG.get_model("compact", true));
+
             ASSERT_TRUE(
                 std::equal(PRG.Tree.begin(), PRG.Tree.end(), newPRG.Tree.begin())
             );
@@ -203,19 +236,21 @@ TEST(Operators, ProgramSizeAndDepthPARAMS)
 
     Dataset data(X,y);
 
+    Parameters params;
+
     SearchSpace SS;
     SS.init(data);
 
-    for (int d = 1; d < 10; ++d)
+    for (int d = 1; d < 6; ++d)
     {
-        for (int s = 1; s < 10; ++s)
+        for (int s = 10; s < 20; ++s)
         {
-            PARAMS["max_size"] = s;
-            PARAMS["max_depth"] = d;
+            params.max_size = s;
+            params.max_depth = d;
 
             fmt::print("d={},s={}\n",d,s);
             fmt::print("make_regressor\n");
 
-            RegressorProgram PRG = SS.make_regressor(0, 0);
+            RegressorProgram PRG = SS.make_regressor(0, 0, params);
 
             fmt::print(
                 "depth = {}, size= {}\n"
diff --git a/tests/cpp/test_variation.cpp b/tests/cpp/test_variation.cpp
index 44ff612c..7cae962b 100644
--- a/tests/cpp/test_variation.cpp
+++ b/tests/cpp/test_variation.cpp
@@ -1,545 +1,581 @@
-// #include "testsHeader.h"
-// #include "../../src/search_space.h"
-// #include "../../src/program/program.h"
-// #include "../../src/program/dispatch_table.h"
-// #include "../../src/data/io.h"
-
-// TEST(Variation, FixedRootDoesntChange)
-// {
-//     PARAMS["mutation_options"] = {
-//         {"point",0.167}, {"insert", 0.167}, {"delete", 0.167}, {"subtree", 0.167}, {"toggle_weight_on", 0.167}, {"toggle_weight_off", 0.167}
-//     };
-//     PARAMS["max_size"] = 20;
-//     PARAMS["max_depth"] = 10;
-
-//     MatrixXf X(10,2);
-//     ArrayXf y(10);
-//     X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
-//          0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
-
-//          0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
-//          0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
-
-//     y << 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0;
-
-//     Dataset data(X,y);
-
-//     SearchSpace SS;
-//     SS.init(data);
-
-//     auto logistic_hash = Signature<ArrayXf(ArrayXf)>().hash();
-
-//     for (int d = 1; d < 10; ++d)
-//     {
-//         for (int s = 1; s < 10; ++s)
-//         {
-//             int successes = 0;
-//             for (int attempt = 0; attempt < 10; ++attempt)
-//             {
-//                 // different program types changes how predict works (and the rettype of predict)
-//                 ClassifierProgram PRG = SS.make_classifier(d, s);
-//                 fmt::print(
-//                     "=================================================\n"
-//                     "depth = {}, size= {}\n"
-//                     "Initial Model 1: {}\n",
-//                     d, s,
-//                     PRG.get_model("compact", true)
-//                 );
-
-//                 Node root = *(PRG.Tree.begin());
-//                 ASSERT_TRUE(root.node_type == NodeType::Logistic);
-//                 ASSERT_TRUE(root.ret_type == DataType::ArrayF);
-//                 ASSERT_TRUE(root.sig_hash == logistic_hash);
-//                 ASSERT_TRUE(root.get_prob_change()==0.0);
-//                 ASSERT_TRUE(root.fixed==true);
-
-//                 auto opt_mutation = PRG.mutate();
-//                 if (opt_mutation)
-//                 {
-//                     successes += 1;
-//                     auto Mut_Child = opt_mutation.value();
-//                     fmt::print("After mutation  : {}\n",
Mut_Child.get_model("compact", true)); - -// Node mut_child_root = *(Mut_Child.Tree.begin()); -// ASSERT_TRUE(mut_child_root.node_type == NodeType::Logistic); -// ASSERT_TRUE(mut_child_root.ret_type == DataType::ArrayF); -// ASSERT_TRUE(mut_child_root.sig_hash == logistic_hash); -// ASSERT_TRUE(mut_child_root.get_prob_change()==0.0); -// ASSERT_TRUE(mut_child_root.fixed==true); -// } - -// ClassifierProgram PRG2 = SS.make_classifier(d, s); -// auto opt_cx = PRG.cross(PRG2); -// if (opt_cx) -// { -// successes += 1; -// auto CX_Child = opt_cx.value(); -// fmt::print("After crossover: {}\n", -// CX_Child.get_model("compact", true)); - -// Node cx_child_root = *(CX_Child.Tree.begin()); -// ASSERT_TRUE(cx_child_root.node_type == NodeType::Logistic); -// ASSERT_TRUE(cx_child_root.ret_type == DataType::ArrayF); -// ASSERT_TRUE(cx_child_root.sig_hash == logistic_hash); -// ASSERT_TRUE(cx_child_root.get_prob_change()==0.0); -// ASSERT_TRUE(cx_child_root.fixed==true); -// } - -// // root remained unchanged -// ASSERT_TRUE(root.node_type == NodeType::Logistic); -// ASSERT_TRUE(root.ret_type == DataType::ArrayF); -// ASSERT_TRUE(root.sig_hash == logistic_hash); -// ASSERT_TRUE(root.get_prob_change()==0.0); -// ASSERT_TRUE(root.fixed==true); -// } -// ASSERT_TRUE(successes > 0); -// } -// } -// } - -// TEST(Variation, InsertMutationWorks) -// { -// // TODO: this tests could be parameterized. -// // To understand design implementation of this test, check Mutation test - -// PARAMS["mutation_options"] = { -// {"point", 0.0}, {"insert", 1.0}, {"delete", 0.0}, {"subtree", 0.0}, {"toggle_weight_on", 0.0}, {"toggle_weight_off", 0.0} -// }; - -// // retrieving the options to check if everything was set right -// std::cout << "Initial mutation configuration" << std::endl; -// auto options = PARAMS["mutation_options"].get>(); -// for (const auto& [k, v] : options) -// std::cout << k << " : " << v << std::endl; - -// MatrixXf X(10,2); -// ArrayXf y(10); -// X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376, -// 0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151, - -// 0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973, -// 0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526; - -// y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517, -// 3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179; - -// Dataset data(X,y); - -// SearchSpace SS; -// SS.init(data); - -// int successes = 0; -// for (int attempt = 0; attempt < 100; ++attempt) -// { -// // we need to have big values here so the mutation will work -// // (when the xmen child exceeds the maximum limits, mutation returns -// // std::nullopt) -// PARAMS["max_size"] = 20; -// PARAMS["max_depth"] = 10; - -// fmt::print("d={},s={}\n", PARAMS["max_depth"].get(), PARAMS["max_size"].get()); -// fmt::print("make_regressor\n"); +#include "testsHeader.h" + +TEST(Variation, FixedRootDoesntChange) +{ + Parameters params; + + MatrixXf X(10,2); + ArrayXf y(10); + X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376, + 0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151, + + 0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973, + 0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526; + + y << 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0; + + Dataset data(X,y); + + SearchSpace SS; + SS.init(data); -// // creating a "small" program (with a plenty amount of space to insert stuff) -// RegressorProgram PRG = SS.make_regressor(5, 5); + auto logistic_hash = Signature().hash(); -// 
fmt::print("PRG.fit(data);\n"); -// PRG.fit(data); -// ArrayXf y_pred = PRG.predict(data); + // TODO: use these values for d and s in all tests (not 1, 1 for example) + for (int d = 3; d < 6; ++d) + { + for (int s = 10; s < 50; ++s) + { + params.max_size = s; + params.max_depth = d; + + Variation variator = Variation(params, SS); + + int successes = 0; + for (int attempt = 0; attempt < 10; ++attempt) + { + // different program types changes how predict works (and the rettype of predict) + ClassifierProgram PRG = SS.make_classifier(0, 0, params); + fmt::print( + "=================================================\n" + "depth = {}, size= {}\n" + "Initial Model 1: {}\n", + d, s, + PRG.get_model("compact", true) + ); + + Node root = *(PRG.Tree.begin()); + + ASSERT_TRUE(root.node_type == NodeType::Logistic); + ASSERT_TRUE(root.ret_type == DataType::ArrayF); + ASSERT_TRUE(root.sig_hash == logistic_hash); + ASSERT_TRUE(root.get_prob_change()==0.0); + ASSERT_TRUE(root.fixed==true); + + auto opt_mutation = variator.mutate(PRG); + + if (opt_mutation) + { + successes += 1; + auto Mut_Child = opt_mutation.value(); + fmt::print("After mutation : {}\n", + Mut_Child.get_model("compact", true)); + + Node mut_child_root = *(Mut_Child.Tree.begin()); + + ASSERT_TRUE(mut_child_root.node_type == NodeType::Logistic); + ASSERT_TRUE(mut_child_root.ret_type == DataType::ArrayF); + ASSERT_TRUE(mut_child_root.sig_hash == logistic_hash); + ASSERT_TRUE(mut_child_root.get_prob_change()==0.0); + ASSERT_TRUE(mut_child_root.fixed==true); + } + + ClassifierProgram PRG2 = SS.make_classifier(0, 0, params); + auto opt_cx = variator.cross(PRG, PRG2); + + if (opt_cx) + { + successes += 1; + auto CX_Child = opt_cx.value(); + fmt::print("After crossover: {}\n", + CX_Child.get_model("compact", true)); + + Node cx_child_root = *(CX_Child.Tree.begin()); + + ASSERT_TRUE(cx_child_root.node_type == NodeType::Logistic); + ASSERT_TRUE(cx_child_root.ret_type == DataType::ArrayF); + ASSERT_TRUE(cx_child_root.sig_hash == logistic_hash); + ASSERT_TRUE(cx_child_root.get_prob_change()==0.0); + ASSERT_TRUE(cx_child_root.fixed==true); + } + + // root remained unchanged + ASSERT_TRUE(root.node_type == NodeType::Logistic); + ASSERT_TRUE(root.ret_type == DataType::ArrayF); + ASSERT_TRUE(root.sig_hash == logistic_hash); + ASSERT_TRUE(root.get_prob_change()==0.0); + ASSERT_TRUE(root.fixed==true); + } + ASSERT_TRUE(successes > 0); + } + } +} + +TEST(Variation, InsertMutationWorks) +{ + // TODO: this tests could be parameterized (one type of mutation each). 
+ // To understand design implementation of this test, check Mutation test + + Parameters params; + params.mutation_probs = { + {"point", 0.0}, + {"insert", 1.0}, + {"delete", 0.0}, + {"subtree", 0.0}, + {"toggle_weight_on", 0.0}, + {"toggle_weight_off", 0.0} + }; + + // retrieving the options to check if everything was set right + std::cout << "Initial mutation configuration" << std::endl; + for (const auto& [k, v] : params.mutation_probs) + std::cout << k << " : " << v << std::endl; + + MatrixXf X(10,2); + ArrayXf y(10); + X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376, + 0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151, + + 0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973, + 0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526; + + y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517, + 3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179; + + Dataset data(X,y); + + SearchSpace SS; + SS.init(data); + + Variation variator = Variation(params, SS); + + int successes = 0; + for (int attempt = 0; attempt < 100; ++attempt) + { + params.max_size = 50; + params.max_depth = 6; + + fmt::print("d={},s={}\n", params.max_depth, params.max_size); + fmt::print("make_regressor\n"); + + // creating a "small" program (with a plenty amount of space to insert stuff) + RegressorProgram PRG = SS.make_regressor(5, 5, params); + + fmt::print("PRG.fit(data);\n"); + PRG.fit(data); + ArrayXf y_pred = PRG.predict(data); -// // applying mutation and checking if the optional result is non-empty -// fmt::print("auto Child = PRG.mutate();\n"); -// auto opt = PRG.mutate(); // We should assume that it will be always the insert mutation - -// if (opt){ -// successes += 1; -// auto Child = opt.value(); -// fmt::print( -// "=================================================\n" -// "depth = {}, size= {}\n" -// "Initial Model: {}\n" -// "Mutated Model: {}\n", -// PARAMS["max_depth"].get(), PARAMS["max_size"].get(), -// PRG.get_model("compact", true), -// Child.get_model("compact", true) -// ); - -// fmt::print("child fit\n"); -// Child.fit(data); -// y_pred = Child.predict(data); - -// // since we successfully inserted a node, this should be always true -// ASSERT_TRUE(Child.size() > PRG.size()); - -// // maybe the insertion spot was a shorter branch than the maximum -// // depth. At least, xmen depth should be equal to its parent -// ASSERT_TRUE(Child.depth() >= PRG.depth()); -// } - -// // lets also see if it always fails when the child exceeds the maximum limits -// PARAMS["max_size"] = PRG.size(); -// PARAMS["max_depth"] = PRG.depth(); - -// auto opt2 = PRG.mutate(); -// if (opt2){ // This shoudl't happen. We'll print then error -// auto Child2 = opt2.value(); - -// std::cout << "Fail failed. 
Mutation weights:" << std::endl; -// auto options2 = PARAMS["mutation_options"].get>(); -// for (const auto& [k, v] : options2) -// std::cout << k << " : " << v << std::endl; - -// fmt::print( -// "=================================================\n" -// "depth = {}, size= {}\n" -// "Initial Model: {}\n" -// "Mutated Model: {}\n", -// PARAMS["max_depth"].get(), PARAMS["max_size"].get(), -// PRG.get_model("compact", true), -// Child2.get_model("compact", true) -// ); -// ASSERT_TRUE(opt2==std::nullopt); -// } -// } -// ASSERT_TRUE(successes > 0); -// } - -// TEST(Variation, Mutation) -// { -// PARAMS["write_mutation_trace"] = true; -// PARAMS["mutation_options"] = { -// {"point",0.167}, {"insert", 0.167}, {"delete", 0.167}, {"subtree", 0.167}, {"toggle_weight_on", 0.167}, {"toggle_weight_off", 0.167} -// }; + // applying mutation and checking if the optional result is non-empty + fmt::print("auto Child = PRG.mutate();\n"); + + // We should assume that it will be always the insert mutation + auto opt = variator.mutate(PRG); + + if (opt){ + successes += 1; + auto Child = opt.value(); + fmt::print( + "=================================================\n" + "depth = {}, size= {}\n" + "Initial Model: {}\n" + "Mutated Model: {}\n", + params.max_depth, params.max_size, + PRG.get_model("compact", true), + Child.get_model("compact", true) + ); + + fmt::print("child fit\n"); + Child.fit(data); + y_pred = Child.predict(data); + + // since we successfully inserted a node, this should be always true + ASSERT_TRUE(Child.size() > PRG.size()); + + // maybe the insertion spot was a shorter branch than the maximum + // depth. At least, xmen depth should be equal to its parent + ASSERT_TRUE(Child.depth() >= PRG.depth()); + } + + // lets also see if it always fails when the child exceeds the maximum limits + params.max_size = PRG.size(); + params.max_depth = PRG.depth(); + + auto opt2 = variator.mutate(PRG); + if (opt2){ // This shoudl't happen. We'll print then error + auto Child2 = opt2.value(); + + std::cout << "Fail failed. Mutation weights:" << std::endl; + for (const auto& [k, v] : params.mutation_probs) + std::cout << k << " : " << v << std::endl; + + fmt::print( + "=================================================\n" + "depth = {}, size= {}\n" + "Initial Model: {}\n" + "Mutated Model: {}\n", + params.max_depth, params.max_size, + PRG.get_model("compact", true), + Child2.get_model("compact", true) + ); + ASSERT_TRUE(opt2==std::nullopt); + } + } + ASSERT_TRUE(successes > 0); +} + +TEST(Variation, Mutation) +{ + Parameters params; -// MatrixXf X(10,2); -// ArrayXf y(10); -// X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376, -// 0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151, - -// 0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973, -// 0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526; - -// y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517, -// 3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179; - -// Dataset data(X,y); - -// SearchSpace SS; -// SS.init(data); - -// int successes = 0; -// for (int d = 1; d < 10; ++d) -// { -// for (int s = 1; s < 10; ++s) -// { -// fmt::print("d={},s={}\n",d,s); -// fmt::print("make_regressor\n"); - -// // if we set max_size and max_depth to zero, it will use the -// // values in the global PARAMS. Otherwise, it will respect the -// // values passed as argument. 
-// RegressorProgram PRG = SS.make_regressor(d, s); - -// fmt::print("PRG.fit(data);\n"); -// PRG.fit(data); -// ArrayXf y_pred = PRG.predict(data); + MatrixXf X(10,2); + ArrayXf y(10); + X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376, + 0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151, + + 0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973, + 0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526; + + y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517, + 3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179; + + Dataset data(X,y); + + SearchSpace SS; + SS.init(data); + + int successes = 0; + for (int d = 1; d < 6; ++d) + { + for (int s = 10; s < 20; ++s) + { + params.max_size = s; + params.max_depth = d; + + Variation variator = Variation(params, SS); + + fmt::print("d={},s={}\n",d,s); + fmt::print("make_regressor\n"); + + // if we set max_size and max_depth to zero, it will use the + // values in the global PARAMS. Otherwise, it will respect the + // values passed as argument. + RegressorProgram PRG = SS.make_regressor(0, 0, params); + + fmt::print("PRG.fit(data);\n"); + PRG.fit(data); + + // saving a string representation + auto PRG_model = PRG.get_model("compact", true); + + fmt::print( + "=================================================\n" + "Original model (BEFORE MUTATION) 1: {}\n", + PRG.get_model("compact", true) + ); + ArrayXf y_pred = PRG.predict(data); -// // applying mutation and checking if the optional result is non-empty -// fmt::print("auto Child = PRG.mutate();\n"); -// auto opt = PRG.mutate(); - -// if (!opt){ -// fmt::print( -// "=================================================\n" -// "depth = {}, size= {}\n" -// "Initial Model: {}\n" -// "Mutation failed to create a child", -// d, s, -// PRG.get_model("compact", true) -// ); -// fmt::print("{}", PARAMS["mutation_trace"].get().dump()); -// } -// else { -// successes += 1; -// auto Child = opt.value(); -// fmt::print( -// "=================================================\n" -// "depth = {}, size= {}\n" -// "Initial Model: {}\n" -// "Mutated Model: {}\n", -// d, s, -// PRG.get_model("compact", true), -// Child.get_model("compact", true) -// ); - -// fmt::print("child fit\n"); -// Child.fit(data); -// y_pred = Child.predict(data); -// } -// } -// } -// // since x1 and x2 have same type, we shoudn't get fails -// ASSERT_TRUE(successes > 0); -// } - -// TEST(Variation, MutationSizeAndDepthLimit) -// { -// PARAMS["write_mutation_trace"] = true; -// PARAMS["mutation_options"] = { -// {"point",0.167}, {"insert", 0.167}, {"delete", 0.167}, {"subtree", 0.167}, {"toggle_weight_on", 0.167}, {"toggle_weight_off", 0.167} -// }; + // applying mutation and checking if the optional result is non-empty + fmt::print("auto Child = PRG.mutate();\n"); + auto opt = variator.mutate(PRG); + + if (!opt){ + fmt::print( + "=================================================\n" + "depth = {}, size= {}\n" + "Initial Model: {}\n" + "Mutation failed to create a child", + d, s, + PRG.get_model("compact", true) + ); + } + else { + successes += 1; + auto Child = opt.value(); + fmt::print( + "=================================================\n" + "depth = {}, size= {}\n" + "Initial Model: {}\n" + "Mutated Model: {}\n", + d, s, + PRG.get_model("compact", true), + Child.get_model("compact", true) + ); + + fmt::print("child fit\n"); + Child.fit(data); + y_pred = Child.predict(data); + + // no collateral effect (parent still the same) + ASSERT_TRUE(PRG_model == PRG.get_model("compact", true)); + } + } 
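+
+// A commented sketch of the per-mutation parameterization suggested by the
+// TODO in InsertMutationWorks. It uses only names already present in this
+// file, but it is a hypothetical outline, not a working test:
+//
+//     for (const auto& mut : {"point", "insert", "delete", "subtree",
+//                             "toggle_weight_on", "toggle_weight_off"})
+//     {
+//         Parameters p;
+//         for (auto& [name, prob] : p.mutation_probs)
+//             prob = (name == mut) ? 1.0 : 0.0;
+//         // ...run the same mutate/fit/predict checks above with p...
+//     }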
+
+TEST(Variation, MutationSizeAndDepthLimit)
+{
+    Parameters params;
-
-//     MatrixXf X(10,2);
-//     ArrayXf y(10);
-//     X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
-//          0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
-
-//          0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
-//          0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
-
-//     y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517,
-//          3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179;
-
-//     Dataset data(X,y);
-
-//     SearchSpace SS;
-//     SS.init(data);
+
+    MatrixXf X(10,2);
+    ArrayXf y(10);
+    X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
+         0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
+
+         0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
+         0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
+
+    y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517,
+         3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179;
+
+    Dataset data(X,y);
+
+    SearchSpace SS;
+    SS.init(data);
-
-//     // prod operator --> arity 4: prod(T1, T2, T3)
-//     // split best --> arity 6: if(terminal > value, T_case_true, T_case_false)
-//     int max_arity = 6;
-
-//     int successes = 0;
-//     for (int d = 5; d < 15; ++d)
-//     {
-//         for (int s = 5; s < 15; ++s)
-//         {
-//             PARAMS["max_size"] = s;
-//             PARAMS["max_depth"] = d;
-
-//             fmt::print("d={},s={}\n",d,s);
-//             fmt::print("make_regressor\n");
-
-//             // Enforcing that the parents does not exceed max_size by
-//             // taking into account the highest arity of the function nodes;
-//             // and the max_depth+1 that PTC2 can generate
-//             RegressorProgram PRG = SS.make_regressor(d-1, s - max_arity);
+
+    // prod operator --> arity 4: prod(T1, T2, T3)
+    // split best --> arity 6: if(terminal > value, T_case_true, T_case_false)
+    int max_arity = 6;
+
+    int successes = 0;
+    for (int d = 1; d < 6; ++d)
+    {
+        for (int s = 5; s < 15; ++s)
+        {
+            params.max_size = s;
+            params.max_depth = d;
-
-//             auto PRG_model = PRG.get_model("compact", true);
-
-//             auto opt = PRG.mutate();
-
-//             if (!opt){
-//                 fmt::print(
-//                     "=================================================\n"
-//                     "depth = {}, size= {}\n"
-//                     "Initial Model: {}\n"
-//                     "Mutation failed to create a child",
-//                     d, s,
-//                     PRG.get_model("compact", true)
-//                 );
-//                 fmt::print("{}", PARAMS["mutation_trace"].get<json>().dump());
-//             }
-//             else {
-//                 successes += 1;
+
+            // creating the variation operator used to produce children
+            Variation variator = Variation(params, SS);
+
+            fmt::print("d={},s={}\n",d,s);
+            fmt::print("make_regressor\n");
+
+            // Enforcing that the parents do not exceed max_size by
+            // taking into account the highest arity of the function nodes;
+            // and the max_depth+1 that PTC2 can generate
+            RegressorProgram PRG = SS.make_regressor(0, 0, params);
+
+            auto PRG_model = PRG.get_model("compact", true);
+            auto opt = variator.mutate(PRG);
+
+            if (!opt){
+                fmt::print(
+                    "=================================================\n"
+                    "depth = {}, size= {}\n"
+                    "Initial Model: {}\n"
+                    "Mutation failed to create a child",
+                    d, s,
+                    PRG.get_model("compact", true)
+                );
+            }
+            else {
+                successes += 1;
-
-//                 // Extracting the child from the std::optional and checking
-//                 // if it is within size and depth restrictions. There is no
-//                 // margin for having slightly bigger expressions.
-//                 auto Child = opt.value();
+
+                // Extracting the child from the std::optional and checking
+                // if it is within size and depth restrictions. There is no
+                // margin for having slightly bigger expressions.
+                auto Child = opt.value();
-
-//                 fmt::print("print\n");
-//                 fmt::print(
-//                     "=================================================\n"
-//                     "depth = {}, size= {}\n"
-//                     "Initial Model: {}\n"
-//                     "Mutated Model: {}\n"
-//                     "Mutated depth: {}\n"
-//                     "Mutated size : {}\n",
-//                     d, s,
-//                     PRG.get_model("compact", true),
-//                     Child.get_model("compact", true),
-//                     Child.depth(),
-//                     Child.size()
-//                 );
-
-//                 // Original didn't change
-//                 ASSERT_TRUE(PRG_model == PRG.get_model("compact", true));
+
+                fmt::print("print\n");
+                fmt::print(
+                    "=================================================\n"
+                    "depth = {}, size= {}\n"
+                    "Initial Model: {}\n"
+                    "Mutated Model: {}\n"
+                    "Mutated depth: {}\n"
+                    "Mutated size : {}\n",
+                    d, s,
+                    PRG.get_model("compact", true),
+                    Child.get_model("compact", true),
+                    Child.depth(),
+                    Child.size()
+                );
+
+                // Original didn't change
+                ASSERT_TRUE(PRG_model == PRG.get_model("compact", true));
-
-//                 ASSERT_TRUE(Child.size() > 0);
-//                 ASSERT_TRUE(Child.size() <= s);
-
-//                 ASSERT_TRUE(Child.size() > 0);
-//                 ASSERT_TRUE(Child.size() <= s);
-
-//                 ASSERT_TRUE(Child.depth() >= 0);
-//                 ASSERT_TRUE(Child.depth() <= d);
-//             }
-//         }
-//     }
-//     ASSERT_TRUE(successes > 0);
-// }
-
-// TEST(Variation, Crossover)
-// {
-//     MatrixXf X(10,2);
-//     ArrayXf y(10);
-//     X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
-//          0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
-
-//          0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
-//          0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
-
-//     y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517,
-//          3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179;
-
-//     Dataset data(X,y);
-
-//     SearchSpace SS;
-//     SS.init(data);
-
-//     int successes = 0;
-//     for (int d = 1; d < 10; ++d)
-//     {
-//         for (int s = 1; s < 10; ++s)
-//         {
-//             RegressorProgram PRG1 = SS.make_regressor(d, s);
-//             RegressorProgram PRG2 = SS.make_regressor(d, s);
-//             PRG1.fit(data);
-//             PRG2.fit(data);
-
-//             fmt::print(
-//                 "=================================================\n"
-//                 "depth = {}, size= {}\n"
-//                 "Initial Model 1: {}\n"
-//                 "Initial Model 2: {}\n",
-//                 d, s,
-//                 PRG1.get_model("compact", true),
-//                 PRG2.get_model("compact", true)
-//             );
-
-//             ArrayXf y_pred = PRG1.predict(data);
-//             fmt::print("cross one\n");
-
-//             auto opt = PRG1.cross(PRG2);
-//             if (!opt){
-//                 fmt::print(
-//                     "=================================================\n"
-//                     "depth = {}, size= {}\n"
-//                     "Original model 1: {}\n"
-//                     "Original model 2: {}\n",
-//                     "Crossover failed to create a child",
-//                     d, s,
-//                     PRG1.get_model("compact", true),
-//                     PRG2.get_model("compact", true)
-//                 );
-//             }
-//             else {
-//                 successes += 1;
-//                 auto Child = opt.value();
-//                 fmt::print(
-//                     "Original model 1 after cross: {}\n"
-//                     "Original model 2 after cross: {}\n",
-//                     PRG1.get_model("compact", true),
-//                     PRG2.get_model("compact", true)
-//                 );
-//                 fmt::print(
-//                     "Crossed Model: {}\n"
-//                     "=================================================\n",
-//                     Child.get_model("compact", true)
-//                 );
-//                 Child.fit(data);
-//                 auto child_pred1 = Child.predict(data);
-//             }
-//         }
-//     }
-//     ASSERT_TRUE(successes > 0);
-// }
-
-// TEST(Variation, CrossoverSizeAndDepthLimit)
-// {
-//     MatrixXf X(10,2);
-//     ArrayXf y(10);
-//     X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
-//          0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
-
-//          0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
-//          0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
-
-//     y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517,
-//          3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179;
-
-//     Dataset data(X,y);
-
-//     SearchSpace SS;
-//     SS.init(data);
-
-//     // prod operator --> arity 4: prod(T1, T2, T3)
-//     // split best --> arity 6: if(terminal > value, T_case_true, T_case_false)
-//     int max_arity = 6;
-
-//     int successes = 0;
-//     for (int d = 5; d < 15; ++d)
-//     {
-//         for (int s = 5; s < 15; ++s)
-//         {
-//             PARAMS["max_size"] = s;
-//             PARAMS["max_depth"] = d;
-
-//             // Enforcing that the parents does not exceed max_size by
-//             // taking into account the highest arity of the function nodes
-//             RegressorProgram PRG1 = SS.make_regressor(d-1, s-max_arity);
-//             RegressorProgram PRG2 = SS.make_regressor(d-1, s-max_arity);
-
-//             auto PRG1_model = PRG1.get_model("compact", true);
-//             auto PRG2_model = PRG2.get_model("compact", true);
-
-//             fmt::print(
-//                 "=================================================\n"
-//                 "settings: depth = {}, size= {}\n"
-//                 "Original model 1: {}\n"
-//                 "depth = {}, size= {}\n"
-//                 "Original model 2: {}\n"
-//                 "depth = {}, size= {}\n",
-//                 d, s,
-//                 PRG1.get_model("compact", true),
-//                 PRG1.depth(), PRG1.size(),
-//                 PRG2.get_model("compact", true),
-//                 PRG2.depth(), PRG2.size()
-//             );
-
-//             fmt::print("cross\n");
-//             auto opt = PRG1.cross(PRG2);
-
-//             if (!opt){
-//                 fmt::print("Crossover failed to create a child"
-//                            "=================================================\n");
-//             }
-//             else {
-//                 successes += 1;
-//                 auto Child = opt.value();
-//                 fmt::print(
-//                     "Child Model      : {}\n"
-//                     "Child Model depth: {}\n"
-//                     "Child Model size : {}\n"
-//                     "=================================================\n",
-//                     Child.get_model("compact", true),
-//                     Child.depth(), Child.size()
-//                 );
-
-//                 // Original didn't change
-//                 ASSERT_TRUE(PRG1_model == PRG1.get_model("compact", true));
-//                 ASSERT_TRUE(PRG2_model == PRG2.get_model("compact", true));
-
-//                 // Child is within restrictions
-//                 ASSERT_TRUE(Child.size() > 0);
-//                 ASSERT_TRUE(Child.size() <= s);
-
-//                 ASSERT_TRUE(Child.depth() >= 0);
-//                 ASSERT_TRUE(Child.depth() <= d);
-//             }
-//         }
-//     }
-//     ASSERT_TRUE(successes > 0);
-// }
\ No newline at end of file
+                ASSERT_TRUE(Child.size() > 0);
+                ASSERT_TRUE(Child.size() <= s);
+
+                ASSERT_TRUE(Child.depth() >= 0);
+                ASSERT_TRUE(Child.depth() <= d);
+            }
+        }
+    }
+    ASSERT_TRUE(successes > 0);
+}
+
+TEST(Variation, Crossover)
+{
+    Parameters params;
+
+    MatrixXf X(10,2);
+    ArrayXf y(10);
+    X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
+         0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
+
+         0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
+         0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
+
+    y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517,
+         3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179;
+
+    Dataset data(X,y);
+
+    SearchSpace SS;
+    SS.init(data);
+
+    int successes = 0;
+    for (int d = 2; d < 6; ++d)
+    {
+        for (int s = 5; s < 15; ++s)
+        {
+            params.max_size = s;
+            params.max_depth = d;
+            Variation variator = Variation(params, SS);
+
+            RegressorProgram PRG1 = SS.make_regressor(d, 0, params);
+            PRG1.fit(data);
+            auto PRG1_model = PRG1.get_model("compact", true);
+
+            RegressorProgram PRG2 = SS.make_regressor(d, 0, params);
+            PRG2.fit(data);
+            auto PRG2_model = PRG2.get_model("compact", true);
+
+            fmt::print(
+                "=================================================\n"
+                "depth = {}, size= {}\n"
+                "Initial Model 1: {}\n"
+                "Initial Model 2: {}\n",
+                d, s,
+                PRG1.get_model("compact", true),
+                PRG2.get_model("compact", true)
+            );
+
+            ArrayXf y_pred = PRG1.predict(data);
+            fmt::print("cross one\n");
+
+            auto opt = variator.cross(PRG1, PRG2);
+            if (!opt){
+                fmt::print(
+                    "=================================================\n"
+                    "Crossover failed to create a child\n"
+                    "depth = {}, size= {}\n"
+                    "Original model 1: {}\n"
+                    "Original model 2: {}\n",
+                    d, s,
+                    PRG1.get_model("compact", true),
+                    PRG2.get_model("compact", true)
+                );
+            }
+            else {
+                successes += 1;
+                auto Child = opt.value();
+                fmt::print(
+                    "Original model 1 after cross: {}\n"
+                    "Original model 2 after cross: {}\n",
+                    PRG1.get_model("compact", true),
+                    PRG2.get_model("compact", true)
+                );
+                fmt::print(
+                    "Crossed Model: {}\n"
+                    "=================================================\n",
+                    Child.get_model("compact", true)
+                );
+                Child.fit(data);
+                auto child_pred1 = Child.predict(data);
+
+                // no collateral effect (parent still the same)
+                ASSERT_TRUE(PRG1_model == PRG1.get_model("compact", true));
+                ASSERT_TRUE(PRG2_model == PRG2.get_model("compact", true));
+            }
+        }
+    }
+    ASSERT_TRUE(successes > 0);
+}
+
+TEST(Variation, CrossoverSizeAndDepthLimit)
+{
+    Parameters params;
+
+    MatrixXf X(10,2);
+    ArrayXf y(10);
+    X << 0.85595296, 0.55417453, 0.8641915 , 0.99481109, 0.99123376,
+         0.9742618 , 0.70894019, 0.94940306, 0.99748867, 0.54205151,
+
+         0.5170537 , 0.8324005 , 0.50316305, 0.10173936, 0.13211973,
+         0.2254195 , 0.70526861, 0.31406024, 0.07082619, 0.84034526;
+
+    y << 3.55634251, 3.13854087, 3.55887523, 3.29462895, 3.33443517,
+         3.4378868 , 3.41092345, 3.5087468 , 3.25110243, 3.11382179;
+
+    Dataset data(X,y);
+
+    SearchSpace SS;
+    SS.init(data);
+
+    // prod operator --> arity 4: prod(T1, T2, T3)
+    // split best --> arity 6: if(terminal > value, T_case_true, T_case_false)
+    int max_arity = 6;
+
+    int successes = 0;
+    for (int d = 1; d < 6; ++d)
+    {
+        for (int s = 5; s < 15; ++s)
+        {
+            params.max_size = s;
+            params.max_depth = d;
+            Variation variator = Variation(params, SS);
+
+            // Enforcing that the parents do not exceed max_size by
+            // taking into account the highest arity of the function nodes
+            RegressorProgram PRG1 = SS.make_regressor(0, 0, params);
+            RegressorProgram PRG2 = SS.make_regressor(0, 0, params);
+
+            auto PRG1_model = PRG1.get_model("compact", true);
+            auto PRG2_model = PRG2.get_model("compact", true);
+
+            fmt::print(
+                "=================================================\n"
+                "settings: depth = {}, size= {}\n"
+                "Original model 1: {}\n"
+                "depth = {}, size= {}\n"
+                "Original model 2: {}\n"
+                "depth = {}, size= {}\n",
+                d, s,
+                PRG1.get_model("compact", true),
+                PRG1.depth(), PRG1.size(),
+                PRG2.get_model("compact", true),
+                PRG2.depth(), PRG2.size()
+            );
+
+            fmt::print("cross\n");
+            auto opt = variator.cross(PRG1, PRG2);
+
+            if (!opt){
+                fmt::print("Crossover failed to create a child\n"
+                           "=================================================\n");
+            }
+            else {
+                successes += 1;
+                auto Child = opt.value();
+                fmt::print(
+                    "Child Model      : {}\n"
+                    "Child Model depth: {}\n"
+                    "Child Model size : {}\n"
+                    "=================================================\n",
+                    Child.get_model("compact", true),
+                    Child.depth(), Child.size()
+                );
+
+                // Original didn't change
+                ASSERT_TRUE(PRG1_model == PRG1.get_model("compact", true));
+                ASSERT_TRUE(PRG2_model == PRG2.get_model("compact", true));
+
+                // Child is within restrictions
+                ASSERT_TRUE(Child.size() > 0);
+                ASSERT_TRUE(Child.size() <= s + 3*max_arity);
+
+                ASSERT_TRUE(Child.depth() >= 0);
+                ASSERT_TRUE(Child.depth() <= d);
+            }
+        }
+    }
+    ASSERT_TRUE(successes > 0);
+}
\ No newline at end of file
diff --git a/tests/cpp/testsHeader.h b/tests/cpp/testsHeader.h
index 24797088..e846c2e6 100644
--- a/tests/cpp/testsHeader.h
+++ b/tests/cpp/testsHeader.h
@@ -26,9 +26,16 @@ using std::stof;
 
 #include 
 #include "../../src/init.h"
+#include "../../src/params.h"
 #include "../../src/data/data.h"
 #include "../../src/program/operator.h"
+#include "../../src/program/dispatch_table.h"
+#include "../../src/program/program.h"
+#include "../../src/individual.h"
+#include "../../src/search_space.h"
 #include "../../src/variation.h"
+#include "../../src/variation.cpp" // TODO: is this ok? (included so the tests see the template definitions)
+
 using namespace Brush;
 using namespace Brush::Data;
 using namespace Brush::Var;