Commit 87261b4
update all test cases to work with the new structure
yalsaffar committed Nov 19, 2024
1 parent 15f395f commit 87261b4
Showing 21 changed files with 292 additions and 186 deletions.
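
The changes follow one pattern: models no longer take lb/ub directly. Bounds become tensors, inducing points are precomputed with select_inducing_points and a SobolAllocator over the stacked bounds, and the bounds themselves move to the generators (and, in config-driven tests, to the [common] section). Below is a minimal sketch of that pattern, assembled from the calls that appear in the hunks that follow rather than copied from any one test:

    import torch

    from aepsych.models import GPClassificationModel
    from aepsych.models.inducing_point_allocators import SobolAllocator
    from aepsych.models.utils import select_inducing_points

    # Bounds are explicit tensors rather than bare floats.
    lb = torch.tensor([-4.0])
    ub = torch.tensor([4.0])
    inducing_size = 10

    # Inducing points are selected up front by an allocator over the stacked bounds...
    bounds = torch.stack([lb, ub])
    inducing_points = select_inducing_points(
        inducing_size=inducing_size,
        allocator=SobolAllocator(bounds=bounds),
    )

    # ...and handed to the model in place of lb/ub.
    model = GPClassificationModel(
        inducing_points=inducing_points,
        inducing_size=inducing_size,
        dim=1,
    )

Generators (MonotonicRejectionGenerator, OptimizeAcqfGenerator, EpsilonGreedyGenerator) now receive lb and ub explicitly, as the hunks below show.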
38 changes: 27 additions & 11 deletions tests/acquisition/test_mi.py
@@ -25,6 +25,8 @@
 from gpytorch.kernels import LinearKernel
 from gpytorch.means import ConstantMean
 from scipy.stats import bernoulli, multivariate_normal, norm, pearsonr
+from aepsych.models.inducing_point_allocators import SobolAllocator
+from aepsych.models.utils import select_inducing_points


 class SingleProbitMI(unittest.TestCase):
@@ -34,8 +36,12 @@ def test_1d_monotonic_single_probit(self):
         np.random.seed(seed)
         n_init = 15
         n_opt = 1
-        lb = -4.0
-        ub = 4.0
+        lb = torch.tensor([-4.0])
+        ub = torch.tensor([4.0])
+        inducing_size = 10
+        bounds = torch.stack([lb, ub])
+        inducing_points = select_inducing_points(inducing_size=inducing_size, allocator=SobolAllocator(bounds=bounds))
+
         acqf = MonotonicBernoulliMCMutualInformation
         acqf_kwargs = {"objective": ProbitObjective()}
         model_list = [
@@ -51,8 +57,8 @@
                 lb=lb,
                 ub=ub,
                 min_asks=n_opt,
-                model=MonotonicRejectionGP(lb=lb, ub=ub, dim=1, monotonic_idxs=[0]),
-                generator=MonotonicRejectionGenerator(acqf, acqf_kwargs),
+                model=MonotonicRejectionGP(inducing_points=inducing_points, dim=1, monotonic_idxs=[0]),
+                generator=MonotonicRejectionGenerator(acqf=acqf, acqf_kwargs=acqf_kwargs, lb=lb, ub=ub),
                 stimuli_per_trial=1,
                 outcome_types=["binary"],
             ),
@@ -81,8 +87,13 @@ def test_1d_single_probit(self):
         np.random.seed(seed)
         n_init = 15
         n_opt = 20
-        lb = -4.0
-        ub = 4.0
+        lb = torch.tensor([-4.0])
+        ub = torch.tensor([4.0])
+        inducing_size = 10
+        bounds = torch.stack([lb, ub])
+        inducing_points = select_inducing_points(inducing_size=inducing_size, allocator=SobolAllocator(bounds=bounds))
+
+
         acqf = BernoulliMCMutualInformation
         extra_acqf_args = {"objective": ProbitObjective()}

@@ -98,8 +109,8 @@
             Strategy(
                 lb=lb,
                 ub=ub,
-                model=GPClassificationModel(lb=lb, ub=ub, dim=1, inducing_size=10),
-                generator=OptimizeAcqfGenerator(acqf, extra_acqf_args),
+                model=GPClassificationModel(inducing_points=inducing_points, dim=1, inducing_size=10),
+                generator=OptimizeAcqfGenerator(acqf=acqf, lb=lb, ub=ub, acqf_kwargs=extra_acqf_args),
                 min_asks=n_opt,
                 stimuli_per_trial=1,
                 outcome_types=["binary"],
@@ -126,10 +137,15 @@ def test_1d_single_probit(self):
     def test_mi_acqf(self):
         mean = ConstantMean().initialize(constant=1.2)
         covar = LinearKernel().initialize(variance=1.0)
+        lb = torch.tensor([0.0])
+        ub = torch.tensor([1.0])
+        inducing_size = 10
+        bounds = torch.stack([lb, ub])
+        inducing_points = select_inducing_points(inducing_size=inducing_size, allocator=SobolAllocator(bounds=bounds))
+
         model = GPClassificationModel(
-            lb=torch.Tensor([0]),
-            ub=torch.Tensor([1]),
-            inducing_size=10,
+            inducing_points=inducing_points,
+            inducing_size=inducing_size,
             mean_module=mean,
             covar_module=covar,
         )
8 changes: 7 additions & 1 deletion tests/generators/test_epsilon_greedy_generator.py
@@ -22,13 +22,17 @@ def test_epsilon_greedy(self):
         np.random.seed(seed)
         total_trials = 2000
         extra_acqf_args = {"target": 0.75, "beta": 1.96}
+        lb = torch.tensor([0.0])
+        ub = torch.tensor([1.0])

         for epsilon in (0.1, 0.5):
             gen = EpsilonGreedyGenerator(
                 subgenerator=MonotonicRejectionGenerator(
-                    acqf=MonotonicMCLSE, acqf_kwargs=extra_acqf_args
+                    acqf=MonotonicMCLSE, acqf_kwargs=extra_acqf_args, lb=lb, ub=ub
                 ),
                 epsilon=epsilon,
+                lb=lb,
+                ub=ub
             )
             model = MagicMock()
             gen.subgenerator.gen = MagicMock()
@@ -44,6 +48,8 @@ def test_greedyepsilon_config(self):
         config_str = """
             [common]
             acqf = MonotonicMCLSE
+            lb = [0]
+            ub = [1]
             [EpsilonGreedyGenerator]
             subgenerator = MonotonicRejectionGenerator
             epsilon = .5
21 changes: 16 additions & 5 deletions tests/generators/test_optimize_acqf_generator.py
@@ -16,7 +16,8 @@
 from aepsych.models import GPClassificationModel, PairwiseProbitModel
 from botorch.acquisition.preference import AnalyticExpectedUtilityOfBestOption
 from sklearn.datasets import make_classification
-
+from aepsych.models.inducing_point_allocators import SobolAllocator
+from aepsych.models.utils import select_inducing_points

 class TestOptimizeAcqfGenerator(unittest.TestCase):
     def test_time_limits(self):
@@ -33,17 +34,21 @@ def test_time_limits(self):
             n_clusters_per_class=4,
         )
         X, y = torch.Tensor(X), torch.Tensor(y)
+        lb = -3 * torch.ones(8)
+        ub = 3 * torch.ones(8)
+        inducing_size = 10
+        bounds = torch.stack([lb, ub])
+        inducing_points = select_inducing_points(inducing_size=inducing_size, allocator=SobolAllocator(bounds=bounds))

         model = GPClassificationModel(
-            lb=-3 * torch.ones(8),
-            ub=3 * torch.ones(8),
+            inducing_points=inducing_points,
             max_fit_time=0.5,
-            inducing_size=10,
+            inducing_size=inducing_size,
         )

         model.fit(X, y)
         generator = OptimizeAcqfGenerator(
-            acqf=MCLevelSetEstimation, acqf_kwargs={"beta": 1.96, "target": 0.5}
+            acqf=MCLevelSetEstimation, acqf_kwargs={"beta": 1.96, "target": 0.5}, lb=lb, ub=ub,
         )

         start = time.time()
@@ -54,6 +59,8 @@
             acqf=MCLevelSetEstimation,
             acqf_kwargs={"beta": 1.96, "target": 0.5},
             max_gen_time=0.1,
+            lb=lb,
+            ub=ub,
         )

         start = time.time()
@@ -68,6 +75,10 @@

     def test_instantiate_eubo(self):
         config = """
+            [common]
+            lb = [-1]
+            ub = [1]
+
             [OptimizeAcqfGenerator]
             acqf = AnalyticExpectedUtilityOfBestOption
             stimuli_per_trial = 2
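
The generator-side change is the mirror image: the bounds that built the allocator are now passed to the generator itself, together with the acquisition function and its kwargs as keyword arguments. A sketch of the updated OptimizeAcqfGenerator call, reconstructed from the hunks above (the import paths are the library's usual ones and are an assumption here, since the diff only shows call sites):

    import torch

    from aepsych.acquisition import MCLevelSetEstimation
    from aepsych.generators import OptimizeAcqfGenerator

    # The same bounds used to build the SobolAllocator above.
    lb = -3 * torch.ones(8)
    ub = 3 * torch.ones(8)

    # Generators now receive lb/ub directly, alongside acqf and acqf_kwargs.
    generator = OptimizeAcqfGenerator(
        acqf=MCLevelSetEstimation,
        acqf_kwargs={"beta": 1.96, "target": 0.5},
        lb=lb,
        ub=ub,
    )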