@techreport{230faed7b4e94f738d390439c4d76f94,
title = "Efficient global optimization and the zero-gradient condition, in expensive simulation",
abstract = "We derive novel algorithms for the sequential solution of optimization problems with box constraints in deterministic simulation where the optimal solutions lie inside the feasible set. We treat the simulation model as a black box, combining “efficient global optimization” (EGO) with the classic first-order optimality condition. EGO is closely related to “Bayesian optimization” and “active machine learning”, as EGO estimates a Kriging (or Gaussian process) metamodel of the underlying simulation model. The classic necessary optimality condition requires that the gradient be a zero vector at a minimum. To quantify how close the estimated gradient (at a new simulation input combination) is to a zero vector, we derive three variants that use different distance measures, namely the Chebyshev, the Euclidean, and the Mahalanobis norms. These measures result in three statistics that define penalty functions. We numerically compare the efficiency and effectiveness of these variants and a classic EGO variant on four popular test functions. Based on these experiments, we conclude that our variants may perform better than this classic variant in pertinent iterations of the search for a global minimizer of a specific problem.",
keywords = "simulation optimization, Bayesian optimization, active machine learning, Kriging, Gaussian process",
author = "Ebru Ang{\"u}n and Jack Kleijnen",
note = "CentER Discussion Paper No. 2024-023",
year = "2024",
month = nov,
day = "14",
language = "English",
volume = "2024-023",
series = "CentER Discussion Paper",
pages = "1--26",
type = "Working Paper",
institution = "CentER, Center for Economic Research",
}
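
% The abstract above names three distance measures (Chebyshev, Euclidean, and Mahalanobis norms) for quantifying how far an estimated gradient is from the zero vector. The following is a minimal illustrative sketch of those three measures in Python with NumPy; it is not the paper's code, and the gradient estimate and covariance matrix below are hypothetical placeholders.
%
%   import numpy as np
%
%   def chebyshev_norm(g):
%       """Chebyshev (L-infinity) norm: largest absolute gradient component."""
%       return np.max(np.abs(g))
%
%   def euclidean_norm(g):
%       """Euclidean (L2) norm of the gradient estimate."""
%       return np.linalg.norm(g)
%
%   def mahalanobis_norm(g, cov):
%       """Mahalanobis norm of g, weighted by an (assumed) positive-definite
%       covariance matrix of the gradient estimator."""
%       return np.sqrt(g @ np.linalg.solve(cov, g))
%
%   # Hypothetical 2-dimensional gradient estimate and covariance matrix.
%   g = np.array([0.3, -0.1])
%   cov = np.array([[0.05, 0.01],
%                   [0.01, 0.04]])
%   print(chebyshev_norm(g), euclidean_norm(g), mahalanobis_norm(g, cov))
%
% Each statistic is smaller the closer the estimated gradient is to the zero vector, which is how the paper's variants turn the first-order optimality condition into penalty functions.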