diff --git a/R-package/tests/testthat/test_GPModel_non_Gaussian_data.R b/R-package/tests/testthat/test_GPModel_non_Gaussian_data.R
index 3f911522..fad7a1fe 100644
--- a/R-package/tests/testthat/test_GPModel_non_Gaussian_data.R
+++ b/R-package/tests/testthat/test_GPModel_non_Gaussian_data.R
@@ -1038,10 +1038,12 @@ if(Sys.getenv("GPBOOST_ALL_TESTS") == "GPBOOST_ALL_TESTS"){
     if(inv_method == "iterative") {
       tolerance_loc_1 <- TOLERANCE_ITERATIVE
       tolerance_loc_2 <- TOLERANCE_ITERATIVE
+      tolerance_loc_3 <- 2*TOLERANCE_ITERATIVE
       loop_cg_PC = c("pivoted_cholesky", "vadu", "fitc")
     } else {
       tolerance_loc_1 <- TOLERANCE_STRICT
       tolerance_loc_2 <- TOLERANCE_MEDIUM
+      tolerance_loc_3 <- TOLERANCE_STRICT
       loop_cg_PC = c("vadu")
     }
     nsim_var_pred <- 10000
@@ -1111,7 +1113,7 @@ if(Sys.getenv("GPBOOST_ALL_TESTS") == "GPBOOST_ALL_TESTS"){
                               predict_cov_mat = TRUE, predict_response = FALSE,
                               cov_pars = cov_pars_pred_eval, X_pred = X_test), file='NUL')
       expect_lt(sum(abs(pred$mu-expected_mu)),tolerance_loc_1)
-      expect_lt(sum(abs(as.vector(pred$cov)-expected_cov)),tolerance_loc_1)
+      expect_lt(sum(abs(as.vector(pred$cov)-expected_cov)),tolerance_loc_3)
       capture.output( pred <- predict(gp_model, y=y, gp_coords_pred = coord_test,
                                       predict_var = TRUE, predict_response = FALSE,
                                       cov_pars = cov_pars_pred_eval, X_pred = X_test), file='NUL')
diff --git a/R-package/tests/testthat/test_z_GPBoost_algorithm.R b/R-package/tests/testthat/test_z_GPBoost_algorithm.R
index b87c0d93..5df181e6 100644
--- a/R-package/tests/testthat/test_z_GPBoost_algorithm.R
+++ b/R-package/tests/testthat/test_z_GPBoost_algorithm.R
@@ -264,7 +264,9 @@ if(Sys.getenv("NO_GPBOOST_ALGO_TESTS") != "NO_GPBOOST_ALGO_TESTS"){
     expect_lt(abs(opt_params$best_score-1.224379), TOLERANCE)
     
     # Parameter tuning with 'tune.pars.bayesian.optimization'
-    source("https://raw.githubusercontent.com/fabsig/GPBoost/master/helpers/R_package_tune_pars_bayesian_optimization.R")# Load required function
+    suppressWarnings({
+      source("https://raw.githubusercontent.com/fabsig/GPBoost/master/helpers/R_package_tune_pars_bayesian_optimization.R")# Load required function
+    })
     other_params <- list(objective = "regression_l2", max_depth = 6, num_leaves = 2^10)
     search_space = list("learning_rate" = c(0.1,1))
     crit = makeMBOInfillCritCB() # other criterion options: makeMBOInfillCritEI()