Normalizing flows provide a general mechanism for defining expressive
probability distributions, only requiring the specification of a (usually
simple) base distribution and a series of bijective transformations. There has
been much recent work on normalizing flows, ranging from improving their
expressive power to expanding their application. We believe the field has now
matured and is in need of a unified perspective. In this review, we attempt to
provide such a perspective by describing flows through the lens of
probabilistic modeling and inference. We place special emphasis on the
fundamental principles of flow design, and discuss foundational topics such as
expressive power and computational trade-offs. We also broaden the conceptual
framing of flows by relating them to more general probability transformations.
Lastly, we summarize the use of flows for tasks such as generative modeling,
approximate inference, and supervised learning.
Description
[1912.02762] Normalizing Flows for Probabilistic Modeling and Inference
%0 Journal Article
%1 papamakarios2019normalizing
%A Papamakarios, George
%A Nalisnick, Eric
%A Rezende, Danilo Jimenez
%A Mohamed, Shakir
%A Lakshminarayanan, Balaji
%D 2019
%K bayesian flows generative-models optimal-transport readings survey
%T Normalizing Flows for Probabilistic Modeling and Inference
%U http://arxiv.org/abs/1912.02762
%X Normalizing flows provide a general mechanism for defining expressive
probability distributions, only requiring the specification of a (usually
simple) base distribution and a series of bijective transformations. There has
been much recent work on normalizing flows, ranging from improving their
expressive power to expanding their application. We believe the field has now
matured and is in need of a unified perspective. In this review, we attempt to
provide such a perspective by describing flows through the lens of
probabilistic modeling and inference. We place special emphasis on the
fundamental principles of flow design, and discuss foundational topics such as
expressive power and computational trade-offs. We also broaden the conceptual
framing of flows by relating them to more general probability transformations.
Lastly, we summarize the use of flows for tasks such as generative modeling,
approximate inference, and supervised learning.
@article{papamakarios2019normalizing,
  abstract      = {Normalizing flows provide a general mechanism for defining expressive
probability distributions, only requiring the specification of a (usually
simple) base distribution and a series of bijective transformations. There has
been much recent work on normalizing flows, ranging from improving their
expressive power to expanding their application. We believe the field has now
matured and is in need of a unified perspective. In this review, we attempt to
provide such a perspective by describing flows through the lens of
probabilistic modeling and inference. We place special emphasis on the
fundamental principles of flow design, and discuss foundational topics such as
expressive power and computational trade-offs. We also broaden the conceptual
framing of flows by relating them to more general probability transformations.
Lastly, we summarize the use of flows for tasks such as generative modeling,
approximate inference, and supervised learning.},
  added-at      = {2019-12-06T17:03:10.000+0100},
  archiveprefix = {arXiv},
  author        = {Papamakarios, George and Nalisnick, Eric and Rezende, Danilo Jimenez and Mohamed, Shakir and Lakshminarayanan, Balaji},
  biburl        = {https://www.bibsonomy.org/bibtex/2b85241009d6bbb6864f5c9860276a9b9/kirk86},
  description   = {[1912.02762] Normalizing Flows for Probabilistic Modeling and Inference},
  eprint        = {1912.02762},
  interhash     = {dfbb8382be8c4637dcef7efb2f6e36ab},
  intrahash     = {b85241009d6bbb6864f5c9860276a9b9},
  keywords      = {bayesian flows generative-models optimal-transport readings survey},
  note          = {arXiv:1912.02762. Comment: Review article. 60 pages, 4 figures},
  timestamp     = {2019-12-06T17:11:34.000+0100},
  title         = {Normalizing Flows for Probabilistic Modeling and Inference},
  url           = {http://arxiv.org/abs/1912.02762},
  year          = {2019},
}