# __References__
#
# [1] Takeno, S., et al., _Multi-fidelity Bayesian Optimization with Max-value
#     Entropy Search._ arXiv:1901.08275v1, 2019.
#     https://arxiv.org/abs/1901.08275
#
# [2] Wang, Z., Jegelka, S., _Max-value Entropy Search for Efficient Bayesian
#     Optimization._ arXiv:1703.01968v3, 2018.
#     https://arxiv.org/abs/1703.01968

# ### 2. Setting up a toy model
#
# We will fit a standard SingleTaskGP model on noisy observations of the
# synthetic 2D Branin function on the hypercube $[-5, 10] \times [0, 15]$.

# In[1]:

import math

import torch
from botorch.fit import fit_gpytorch_mll
from botorch.models import SingleTaskGP
from botorch.test_functions import Branin
from botorch.utils.transforms import normalize, standardize
from gpytorch.mlls import ExactMarginalLogLikelihood

# Fix the seed so the tutorial is reproducible.
torch.manual_seed(7)

# Branin._bounds is a list of (lower, upper) pairs per dimension; transposing
# gives a (2, d) tensor with row 0 = lower bounds and row 1 = upper bounds.
bounds = torch.tensor(Branin._bounds).T

# Draw 10 uniform random points in the raw Branin domain and evaluate the
# negated function (BoTorch acquisition functions assume maximization).
train_X = bounds[0] + (bounds[1] - bounds[0]) * torch.rand(10, 2)
train_Y = Branin(negate=True)(train_X).unsqueeze(-1)

# The GP is trained on inputs normalized to the unit cube and on standardized,
# noise-corrupted outcomes — downstream code must therefore work in the
# normalized input space.
train_X = normalize(train_X, bounds=bounds)
train_Y = standardize(train_Y + 0.05 * torch.randn_like(train_Y))

model = SingleTaskGP(train_X, train_Y)
mll = ExactMarginalLogLikelihood(model.likelihood, model)
fit_gpytorch_mll(mll)

# ### 3. Defining the MES acquisition function
#
# The `qMaxValueEntropy` acquisition function is a subclass of
# `MCAcquisitionFunction` and supports pending points `X_pending`. Required
# arguments for the constructor are `model` and `candidate_set` (the
# discretized candidate points in the design space that will be used to draw
# max value samples). There are also other optional parameters, such as the
# number of max value samples $\mathcal{F}^*$, the number of $\mathcal{Y}$
# samples, and the number of fantasies (in case of $q > 1$). Two different
# sampling algorithms are supported for the max value samples: discretized
# Thompson sampling and the Gumbel sampling introduced in [2]. Gumbel sampling
# is the default choice in the acquisition function.
# In[2]:

from botorch.acquisition.max_value_entropy_search import qMaxValueEntropy
from botorch.optim import optimize_acqf, optimize_acqf_cyclic

# NOTE(review): the model above was fit on inputs normalized to the unit cube,
# so both the candidate set and the optimization bounds must live in
# $[0, 1]^d$ as well. Rescaling the candidate set to the raw Branin domain
# (as the original cell did) would query the GP far outside its training
# support. Candidates returned below are in normalized coordinates; map them
# back to the Branin domain with
# `botorch.utils.transforms.unnormalize(candidates, bounds=bounds)`.
standard_bounds = torch.zeros_like(bounds)
standard_bounds[1] = 1.0

# 1000 uniform points in the (normalized) design space, used to draw max-value
# samples; match the device/dtype of the training data.
candidate_set = torch.rand(
    1000, bounds.size(1), device=bounds.device, dtype=bounds.dtype
)
qMES = qMaxValueEntropy(model, candidate_set)

# ### 4. Optimizing the MES acquisition function to get the next candidate points
#
# In order to obtain the next candidate point(s) to query, we need to optimize
# the acquisition function over the design space. For the $q = 1$ case, we can
# simply call the `optimize_acqf` function in the library. For $q > 1$, due to
# the intractability of the acquisition function in this case, we need to use
# either sequential or cyclic optimization (multiple cycles of sequential
# optimization).

# In[3]:

# for q = 1
candidates, acq_value = optimize_acqf(
    acq_function=qMES,
    bounds=standard_bounds,
    q=1,
    num_restarts=10,
    raw_samples=512,
)
candidates, acq_value

# In[4]:

# for q = 2, sequential optimization
candidates_q2, acq_value_q2 = optimize_acqf(
    acq_function=qMES,
    bounds=standard_bounds,
    q=2,
    num_restarts=10,
    raw_samples=512,
    sequential=True,
)
candidates_q2, acq_value_q2

# In[5]:

# for q = 2, cyclic optimization
candidates_q2_cyclic, acq_value_q2_cyclic = optimize_acqf_cyclic(
    acq_function=qMES,
    bounds=standard_bounds,
    q=2,
    num_restarts=10,
    raw_samples=512,
    cyclic_options={"maxiter": 2},
)
candidates_q2_cyclic, acq_value_q2_cyclic

# The use of the `qMultiFidelityMaxValueEntropy` acquisition function is very
# similar to `qMaxValueEntropy`, but requires additional optional arguments
# related to the fidelity and cost models. We will provide more details on the
# MF-MES acquisition function in a separate tutorial.