D. Rezende and S. Mohamed. (2015). arXiv:1505.05770. Comment: Proceedings of the 32nd International Conference on Machine Learning.
Abstract
The choice of approximate posterior distribution is one of the core problems
in variational inference. Most applications of variational inference employ
simple families of posterior approximations in order to allow for efficient
inference, focusing on mean-field or other simple structured approximations.
This restriction has a significant impact on the quality of inferences made
using variational methods. We introduce a new approach for specifying flexible,
arbitrarily complex and scalable approximate posterior distributions. Our
approximations are distributions constructed through a normalizing flow,
whereby a simple initial density is transformed into a more complex one by
applying a sequence of invertible transformations until a desired level of
complexity is attained. We use this view of normalizing flows to develop
categories of finite and infinitesimal flows and provide a unified view of
approaches for constructing rich posterior approximations. We demonstrate that
the theoretical advantages of having posteriors that better match the true
posterior, combined with the scalability of amortized variational approaches,
provides a clear improvement in performance and applicability of variational
inference.
Description
[1505.05770] Variational Inference with Normalizing Flows
%0 Generic
%1 rezende2015variational
%A Rezende, Danilo Jimenez
%A Mohamed, Shakir
%D 2015
%K deep-learning from:adulny generative-models normalizing-flows variational-inference
%T Variational Inference with Normalizing Flows
%U http://arxiv.org/abs/1505.05770
%X The choice of approximate posterior distribution is one of the core problems
in variational inference. Most applications of variational inference employ
simple families of posterior approximations in order to allow for efficient
inference, focusing on mean-field or other simple structured approximations.
This restriction has a significant impact on the quality of inferences made
using variational methods. We introduce a new approach for specifying flexible,
arbitrarily complex and scalable approximate posterior distributions. Our
approximations are distributions constructed through a normalizing flow,
whereby a simple initial density is transformed into a more complex one by
applying a sequence of invertible transformations until a desired level of
complexity is attained. We use this view of normalizing flows to develop
categories of finite and infinitesimal flows and provide a unified view of
approaches for constructing rich posterior approximations. We demonstrate that
the theoretical advantages of having posteriors that better match the true
posterior, combined with the scalability of amortized variational approaches,
provides a clear improvement in performance and applicability of variational
inference.
@inproceedings{rezende2015variational,
  abstract      = {The choice of approximate posterior distribution is one of the core problems
in variational inference. Most applications of variational inference employ
simple families of posterior approximations in order to allow for efficient
inference, focusing on mean-field or other simple structured approximations.
This restriction has a significant impact on the quality of inferences made
using variational methods. We introduce a new approach for specifying flexible,
arbitrarily complex and scalable approximate posterior distributions. Our
approximations are distributions constructed through a normalizing flow,
whereby a simple initial density is transformed into a more complex one by
applying a sequence of invertible transformations until a desired level of
complexity is attained. We use this view of normalizing flows to develop
categories of finite and infinitesimal flows and provide a unified view of
approaches for constructing rich posterior approximations. We demonstrate that
the theoretical advantages of having posteriors that better match the true
posterior, combined with the scalability of amortized variational approaches,
provides a clear improvement in performance and applicability of variational
inference.},
  added-at      = {2021-06-22T15:24:09.000+0200},
  author        = {Rezende, Danilo Jimenez and Mohamed, Shakir},
  biburl        = {https://www.bibsonomy.org/bibtex/275e653896b108fb0845eec3c571f4454/adulny},
  booktitle     = {Proceedings of the 32nd International Conference on Machine Learning},
  series        = {Proceedings of Machine Learning Research},
  volume        = {37},
  pages         = {1530--1538},
  publisher     = {PMLR},
  description   = {[1505.05770] Variational Inference with Normalizing Flows},
  eprint        = {1505.05770},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  interhash     = {3d2c32ae3adbf318c87c68823814b847},
  intrahash     = {75e653896b108fb0845eec3c571f4454},
  keywords      = {deep-learning from:adulny generative-models normalizing-flows variational-inference},
  timestamp     = {2021-06-22T15:24:09.000+0200},
  title         = {Variational Inference with Normalizing Flows},
  url           = {http://arxiv.org/abs/1505.05770},
  year          = {2015}
}