{ "id": "2007.00939", "version": "v1", "published": "2020-07-02T07:35:49.000Z", "updated": "2020-07-02T07:35:49.000Z", "title": "BOSH: Bayesian Optimization by Sampling Hierarchically", "authors": [ "Henry B. Moss", "David S. Leslie", "Paul Rayson" ], "categories": [ "cs.LG", "stat.ML" ], "abstract": "Deployments of Bayesian Optimization (BO) for functions with stochastic evaluations, such as parameter tuning via cross validation and simulation optimization, typically optimize an average of a fixed set of noisy realizations of the objective function. However, disregarding the true objective function in this manner finds a high-precision optimum of the wrong function. To solve this problem, we propose Bayesian Optimization by Sampling Hierarchically (BOSH), a novel BO routine pairing a hierarchical Gaussian process with an information-theoretic framework to generate a growing pool of realizations as the optimization progresses. We demonstrate that BOSH provides more efficient and higher-precision optimization than standard BO across synthetic benchmarks, simulation optimization, reinforcement learning and hyper-parameter tuning tasks.", "revisions": [ { "version": "v1", "updated": "2020-07-02T07:35:49.000Z" } ], "analyses": { "keywords": [ "bayesian optimization", "simulation optimization", "stochastic evaluations", "cross validation", "true objective function" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }