

Python Bayesian Optimization Sample Code


python
# Example of Bayesian Optimization in Python using the bayesian-optimization package

from bayes_opt import BayesianOptimization
import numpy as np

# Define the function to be optimized (black-box function)
def black_box_function(x, y):
    return -x ** 2 - (y - 1) ** 2 + 1

# Define the bounds of the parameter space
pbounds = {'x': (-2, 2), 'y': (-3, 3)}

# Create the Bayesian Optimization object
optimizer = BayesianOptimization(
    f=black_box_function,
    pbounds=pbounds,
    random_state=1,
)

# Perform the optimization
optimizer.maximize(
    init_points=2,  # Initial random explorations
    n_iter=10,      # Number of iterations to perform optimization
)

# Print the best result
print("Best result: ", optimizer.max)

# The output shows the best parameters found and the corresponding function value.
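
Beyond `optimizer.max`, the package also records every probed point. A minimal sketch of walking that history (assuming a current version of bayesian-optimization, where `optimizer.res` is a list of result dicts):

python
# Inspect the full evaluation history kept by the optimizer
# (each entry is a dict with 'target' and 'params' keys)
for i, res in enumerate(optimizer.res):
    print(f"Iteration {i}: target={res['target']:.4f}, params={res['params']}")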

python
# Bayesian Optimization from scratch with Gaussian Process surrogate model and acquisition function

import numpy as np
from numpy import sin, pi
from numpy.random import normal, random
from sklearn.gaussian_process import GaussianProcessRegressor
from scipy.stats import norm
from matplotlib import pyplot as plt
from warnings import catch_warnings, simplefilter

# Objective function to optimize
def objective(x, noise=0.1):
    noise_val = normal(loc=0, scale=noise)
    return (x ** 2) * (sin(5 * pi * x) ** 6) + noise_val

# Surrogate function: Gaussian Process regressor prediction
def surrogate(model, X):
    with catch_warnings():
        simplefilter("ignore")
        return model.predict(X, return_std=True)

# Acquisition function: Probability of Improvement (PI)
def acquisition(X, Xsamples, model):
    yhat, _ = surrogate(model, X)
    best = max(yhat)
    mu, std = surrogate(model, Xsamples)
    mu, std = mu.ravel(), std.ravel()  # Flatten in case predictions come back 2-D
    probs = norm.cdf((mu - best) / (std + 1E-9))
    return probs

# Optimize the acquisition function by random sampling
def opt_acquisition(X, y, model):
    Xsamples = random(100).reshape(-1, 1)  # 100 candidates randomly sampled
    scores = acquisition(X, Xsamples, model)
    ix = np.argmax(scores)
    return Xsamples[ix].reshape(-1, 1)

# Initialization
X = np.array([[random()] for _ in range(5)])  # Start with 5 random points
y = np.array([[objective(x[0])] for x in X])  # Evaluate each point; y has shape (5, 1)

model = GaussianProcessRegressor()

# Fit initial model
model.fit(X, y)

# Perform 20 iterations of Bayesian Optimization
for i in range(20):
    x_next = opt_acquisition(X, y, model)
    y_next = objective(x_next[0, 0])
    print(f"Iteration {i+1}: x={x_next[0, 0]:.4f}, y={y_next:.4f}")
    X = np.vstack((X, x_next))
    y = np.vstack((y, y_next))
    model.fit(X, y)

# Plot the final function and sampled points
x_plot = np.linspace(0, 1, 1000).reshape(-1, 1)
y_true = [(x[0] ** 2) * (sin(5 * pi * x[0]) ** 6) for x in x_plot]

plt.plot(x_plot, y_true, 'r-', label='True function')
plt.scatter(X, y, c='b', label='Samples')
plt.legend()
plt.title("Bayesian Optimization Sampling")
plt.show()
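
Probability of Improvement is the simplest acquisition function; Expected Improvement (EI) also weighs how large the improvement is expected to be. A drop-in sketch reusing the `surrogate` helper defined above:

python
# Alternative acquisition: Expected Improvement (EI), interchangeable
# with the PI function above
def acquisition_ei(X, Xsamples, model):
    yhat, _ = surrogate(model, X)
    best = max(yhat)
    mu, std = surrogate(model, Xsamples)
    mu, std = mu.ravel(), std.ravel()
    imp = mu - best
    Z = imp / (std + 1E-9)
    return imp * norm.cdf(Z) + std * norm.pdf(Z)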

python
# Example of Bayesian Optimization for hyperparameter tuning using scikit-optimize (skopt)

from skopt import gp_minimize
from skopt.space import Integer
from skopt.utils import use_named_args
from sklearn.datasets import make_blobs
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import cross_val_score
from numpy import mean

# Generate a 2D classification dataset
X, y = make_blobs(n_samples=500, centers=3, n_features=2)

# Define the model
model = KNeighborsClassifier()

# Define hyperparameter search space
search_space = [Integer(1, 5, name='n_neighbors'), Integer(1, 2, name='p')]

# Define the evaluation function with named args
@use_named_args(search_space)
def evaluate_model(n_neighbors, p):
    model.set_params(n_neighbors=n_neighbors, p=p)
    result = cross_val_score(model, X, y, cv=5, n_jobs=-1, scoring='accuracy')
    return 1.0 - mean(result)  # Minimize error

# Perform Bayesian Optimization
result = gp_minimize(evaluate_model, search_space)

# Report best result
print(f"Best Accuracy: {1.0 - result.fun:.3f}")
print(f"Best Parameters: n_neighbors={result.x[0]}, p={result.x}")

python
# Example using GPyOpt for Bayesian Optimization of a multi-dimensional function

import GPyOpt
import numpy as np
import matplotlib.pyplot as plt

# Define the function to minimize
def objfunc2d(x):
    x1 = x[:, 0]
    x2 = x[:, 1]
    return ((x1 ** 2 + x2 ** 2) * (np.sin(x1) ** 2 - np.cos(x2))).reshape(-1, 1)

# Define domain bounds
bounds2d = [{'name': 'var_1', 'type': 'continuous', 'domain': (0, 10)},
            {'name': 'var_2', 'type': 'continuous', 'domain': (0, 10)}]

# Instantiate Bayesian Optimization object
max_iter = 50
myBopt_2d = GPyOpt.methods.BayesianOptimization(f=objfunc2d,
                                                domain=bounds2d)

# Run optimization
myBopt_2d.run_optimization(max_iter=max_iter)

print("="*20)
print(f"Value of (x,y) that minimizes the objective: {myBopt_2d.x_opt}")
print(f"Minimum value of the objective: {myBopt_2d.fx_opt}")
print("="*20)

# Plot acquisition function
myBopt_2d.plot_acquisition()
plt.show()
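
The constructor accepts further options; for instance, you can select the acquisition function explicitly and declare the objective noise-free. A sketch (assuming GPyOpt's `acquisition_type` and `exact_feval` constructor arguments):

python
# Same setup, but with an explicit acquisition function and a noise-free objective
myBopt_ei = GPyOpt.methods.BayesianOptimization(f=objfunc2d,
                                                domain=bounds2d,
                                                acquisition_type='EI',  # Expected Improvement
                                                exact_feval=True)       # objective is deterministic
myBopt_ei.run_optimization(max_iter=max_iter)
myBopt_ei.plot_convergence()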

python
# Example of Bayesian Optimization in hyperparameter tuning with the bayesian-optimization package

from bayes_opt import BayesianOptimization
from sklearn.svm import SVR
from sklearn.model_selection import cross_val_score
from sklearn.datasets import fetch_california_housing

# Load data (load_boston was removed in scikit-learn 1.2; the California
# housing dataset is a drop-in regression replacement)
data = fetch_california_housing()
X = data.data
y = data.target

# Define the SVR model evaluation function
def svr_cv(C, epsilon):
    model = SVR(C=C, epsilon=epsilon)
    scores = cross_val_score(model, X, y, cv=5, scoring='neg_mean_squared_error')
    return scores.mean()

# Define parameter bounds
pbounds = {'C': (0.1, 10), 'epsilon': (0.01, 1)}

# Setup Bayesian Optimization
optimizer = BayesianOptimization(
    f=svr_cv,
    pbounds=pbounds,
    random_state=1,
)

# Run optimization
optimizer.maximize(init_points=5, n_iter=25)

print("Best parameters found:")
print(optimizer.max['params'])
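
After the search, you typically refit a final model on the best parameters; a minimal sketch:

python
# Refit an SVR on the full dataset using the best parameters found
best = optimizer.max['params']
final_model = SVR(C=best['C'], epsilon=best['epsilon'])
final_model.fit(X, y)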

python
# Example explaining core components of Bayesian Optimization with code snippets

# Import necessary libraries
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C
from scipy.stats import norm
import numpy as np

# Define an example objective function
def objective(x):
    return -(x - 2) ** 2 + 3

# Kernel definition for Gaussian Process
kernel = C(1.0, (1e-3, 1e3)) * RBF(length_scale=1.0, length_scale_bounds=(1e-2, 1e2))

# Gaussian Process model
gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=10, alpha=1e-4)

# Training data (some initial points)
X_train = np.array([[1.0], [3.0], [2.0]])
y_train = objective(X_train).ravel()

# Fit the model
gp.fit(X_train, y_train)

# Predict mean and sigma for new query points
X_query = np.array([[1.5], [2.5]])
mu, sigma = gp.predict(X_query, return_std=True)

# Acquisition function - Expected Improvement
def expected_improvement(X, model, y_max):
    mu, sigma = model.predict(X, return_std=True)
    with np.errstate(divide='warn'):
        Z = (mu - y_max) / sigma
        ei = (mu - y_max) * norm.cdf(Z) + sigma * norm.pdf(Z)
        ei[sigma == 0.0] = 0.0
    return ei

# Calculate expected improvement at query points
y_max = y_train.max()
ei_values = expected_improvement(X_query, gp, y_max)
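
To turn EI values into a decision, evaluate the acquisition over candidate points and take the argmax. A minimal sketch over a dense 1-D grid (the grid range is an assumption matching the bounds used in the next example):

python
# Pick the next sample by maximizing EI over a dense grid of candidates
X_grid = np.linspace(0, 4, 400).reshape(-1, 1)
ei_grid = expected_improvement(X_grid, gp, y_max)
x_next = X_grid[np.argmax(ei_grid)]
print(f"Grid-based suggestion: x = {x_next[0]:.4f}")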

python
# Example: acquisition function optimization using scipy optimize

from scipy.optimize import minimize

# Negative expected improvement (scipy minimizes, so we flip the sign)
def min_obj(x):
    # Return a scalar, as scipy.optimize.minimize expects
    return -expected_improvement(np.array(x).reshape(1, -1), gp, y_max)[0]

# Initial guess (any point inside the bounds works)
x0 = np.array([2.0])

# Bounds for parameter
bounds = [(0, 4)]

# Optimization
res = minimize(min_obj, x0=x0, bounds=bounds, method='L-BFGS-B')
x_next = res.x

print(f"Next sampling point suggested by acquisition: {x_next}")

python
# Full Bayesian Optimization loop with GP surrogate, acquisition function, and optimization

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
from scipy.stats import norm

# Function to optimize
def f(x):
    return -np.sin(3 * x) - x ** 2 + 0.7 * x

# Define bounds
bounds = np.array([[-2, 2]])

# Initial sampling points
X_sample = np.array([[-1.5], [0.0], [1.5]])
Y_sample = f(X_sample)

# Gaussian Process model
kernel = Matern(length_scale=1.0)
model = GaussianProcessRegressor(kernel=kernel)

# Fit model on initial samples
model.fit(X_sample, Y_sample)

# Expected Improvement acquisition function
def expected_improvement(X, X_sample, Y_sample, model, xi=0.01):
    mu, sigma = model.predict(X, return_std=True)
    mu = mu.reshape(-1, 1)
    sigma = sigma.reshape(-1, 1)
    mu_sample_opt = np.max(Y_sample)

    with np.errstate(divide='warn'):
        imp = mu - mu_sample_opt - xi
        Z = imp / sigma
        ei = imp * norm.cdf(Z) + sigma * norm.pdf(Z)
        ei[sigma == 0.0] = 0.0
    return ei

# Optimize acquisition function
from scipy.optimize import minimize

def propose_location(acquisition, X_sample, Y_sample, model, bounds, n_restarts=25):
    dim = X_sample.shape[1]
    min_val = 1
    min_x = None

    def min_obj(X):
        # Minimization objective is the negative acquisition function (scalar)
        return -acquisition(X.reshape(-1, dim), X_sample, Y_sample, model).flatten()[0]

    # Start L-BFGS-B from n_restarts random points and keep the best result
    for x0 in np.random.uniform(bounds[:, 0], bounds[:, 1], size=(n_restarts, dim)):
        res = minimize(min_obj, x0=x0, bounds=bounds, method='L-BFGS-B')
        if res.fun < min_val:
            min_val = res.fun
            min_x = res.x

    return min_x.reshape(-1, 1)
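
With the surrogate, the acquisition function, and `propose_location` in place, the optimization loop itself is short; a minimal sketch of how the pieces fit together (the iteration count is arbitrary):

python
# Run a few Bayesian Optimization iterations with the pieces defined above
n_iter = 10
for i in range(n_iter):
    # Propose the next sampling point, evaluate it, and update the dataset
    X_next = propose_location(expected_improvement, X_sample, Y_sample, model, bounds)
    Y_next = f(X_next)
    X_sample = np.vstack((X_sample, X_next))
    Y_sample = np.vstack((Y_sample, Y_next))
    model.fit(X_sample, Y_sample)

print(f"Best observed value: {Y_sample.max():.4f} "
      f"at x = {X_sample[Y_sample.argmax()][0]:.4f}")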