We propose a novel sampling framework for inference in probabilistic models:
an active learning approach that converges more quickly (in wall-clock time)
than Markov chain Monte Carlo (MCMC) benchmarks. The central challenge in
probabilistic inference is numerical integration, to average over ensembles of
models or unknown (hyper-)parameters (for example to compute the marginal
likelihood or a partition function). MCMC has provided approaches to numerical
integration that deliver state-of-the-art inference, but can suffer from sample
inefficiency and poor convergence diagnostics. Bayesian quadrature techniques
offer a model-based solution to such problems, but their uptake has been
hindered by prohibitive computation costs. We introduce a warped model for
probabilistic integrands (likelihoods) that are known to be non-negative,
permitting a cheap active learning scheme to optimally select sample locations.
Our algorithm is demonstrated to offer faster convergence (in seconds) relative
to simple Monte Carlo and annealed importance sampling on both synthetic and
real-world examples.
Description
[1411.0439] Sampling for Inference in Probabilistic Models with Fast Bayesian Quadrature
%0 Journal Article
%1 gunter2014sampling
%A Gunter, Tom
%A Osborne, Michael A.
%A Garnett, Roman
%A Hennig, Philipp
%A Roberts, Stephen J.
%D 2014
%K bayesian readings sampling stats
%T Sampling for Inference in Probabilistic Models with Fast Bayesian
Quadrature
%U http://arxiv.org/abs/1411.0439
%X We propose a novel sampling framework for inference in probabilistic models:
an active learning approach that converges more quickly (in wall-clock time)
than Markov chain Monte Carlo (MCMC) benchmarks. The central challenge in
probabilistic inference is numerical integration, to average over ensembles of
models or unknown (hyper-)parameters (for example to compute the marginal
likelihood or a partition function). MCMC has provided approaches to numerical
integration that deliver state-of-the-art inference, but can suffer from sample
inefficiency and poor convergence diagnostics. Bayesian quadrature techniques
offer a model-based solution to such problems, but their uptake has been
hindered by prohibitive computation costs. We introduce a warped model for
probabilistic integrands (likelihoods) that are known to be non-negative,
permitting a cheap active learning scheme to optimally select sample locations.
Our algorithm is demonstrated to offer faster convergence (in seconds) relative
to simple Monte Carlo and annealed importance sampling on both synthetic and
real-world examples.
@article{gunter2014sampling,
  abstract      = {We propose a novel sampling framework for inference in probabilistic models:
an active learning approach that converges more quickly (in wall-clock time)
than Markov chain Monte Carlo (MCMC) benchmarks. The central challenge in
probabilistic inference is numerical integration, to average over ensembles of
models or unknown (hyper-)parameters (for example to compute the marginal
likelihood or a partition function). MCMC has provided approaches to numerical
integration that deliver state-of-the-art inference, but can suffer from sample
inefficiency and poor convergence diagnostics. Bayesian quadrature techniques
offer a model-based solution to such problems, but their uptake has been
hindered by prohibitive computation costs. We introduce a warped model for
probabilistic integrands (likelihoods) that are known to be non-negative,
permitting a cheap active learning scheme to optimally select sample locations.
Our algorithm is demonstrated to offer faster convergence (in seconds) relative
to simple Monte Carlo and annealed importance sampling on both synthetic and
real-world examples.},
  added-at      = {2019-11-05T20:53:40.000+0100},
  archiveprefix = {arXiv},
  author        = {Gunter, Tom and Osborne, Michael A. and Garnett, Roman and Hennig, Philipp and Roberts, Stephen J.},
  biburl        = {https://www.bibsonomy.org/bibtex/2d028abe53595dc720c480f185ab9d765/kirk86},
  description   = {[1411.0439] Sampling for Inference in Probabilistic Models with Fast Bayesian Quadrature},
  eprint        = {1411.0439},
  interhash     = {acf25b0349ccda15f3ff228eace260d9},
  intrahash     = {d028abe53595dc720c480f185ab9d765},
  keywords      = {bayesian readings sampling stats},
  timestamp     = {2019-11-05T20:53:40.000+0100},
  title         = {Sampling for Inference in Probabilistic Models with Fast {Bayesian} Quadrature},
  url           = {http://arxiv.org/abs/1411.0439},
  year          = {2014}
}