We introduce and study a class of probabilistic generative models, where the
latent object is a finite-dimensional diffusion process on a finite time
interval and the observed variable is drawn conditionally on the terminal point
of the diffusion. We make the following contributions:
We provide a unified viewpoint on both sampling and variational inference in
such generative models through the lens of stochastic control.
We quantify the expressiveness of diffusion-based generative models.
Specifically, we show that one can efficiently sample from a wide class of
terminal target distributions by choosing the drift of the latent diffusion
from the class of multilayer feedforward neural nets, with the accuracy of
sampling measured by the Kullback-Leibler divergence to the target
distribution.
Finally, we present and analyze a scheme for unbiased simulation of
generative models with latent diffusions and provide bounds on the variance of
the resulting estimators. This scheme can be implemented as a deep generative
model with a random number of layers.
Description
[1903.01608] Theoretical guarantees for sampling and inference in generative models with latent diffusions
%0 Journal Article
%1 tzen2019theoretical
%A Tzen, Belinda
%A Raginsky, Maxim
%D 2019
%K generative-models sampling stats theory
%T Theoretical guarantees for sampling and inference in generative models
with latent diffusions
%U http://arxiv.org/abs/1903.01608
%X We introduce and study a class of probabilistic generative models, where the
latent object is a finite-dimensional diffusion process on a finite time
interval and the observed variable is drawn conditionally on the terminal point
of the diffusion. We make the following contributions:
We provide a unified viewpoint on both sampling and variational inference in
such generative models through the lens of stochastic control.
We quantify the expressiveness of diffusion-based generative models.
Specifically, we show that one can efficiently sample from a wide class of
terminal target distributions by choosing the drift of the latent diffusion
from the class of multilayer feedforward neural nets, with the accuracy of
sampling measured by the Kullback-Leibler divergence to the target
distribution.
Finally, we present and analyze a scheme for unbiased simulation of
generative models with latent diffusions and provide bounds on the variance of
the resulting estimators. This scheme can be implemented as a deep generative
model with a random number of layers.
@article{tzen2019theoretical,
  abstract      = {We introduce and study a class of probabilistic generative models, where the
latent object is a finite-dimensional diffusion process on a finite time
interval and the observed variable is drawn conditionally on the terminal point
of the diffusion. We make the following contributions:
We provide a unified viewpoint on both sampling and variational inference in
such generative models through the lens of stochastic control.
We quantify the expressiveness of diffusion-based generative models.
Specifically, we show that one can efficiently sample from a wide class of
terminal target distributions by choosing the drift of the latent diffusion
from the class of multilayer feedforward neural nets, with the accuracy of
sampling measured by the Kullback-Leibler divergence to the target
distribution.
Finally, we present and analyze a scheme for unbiased simulation of
generative models with latent diffusions and provide bounds on the variance of
the resulting estimators. This scheme can be implemented as a deep generative
model with a random number of layers.},
  added-at      = {2019-04-20T18:22:28.000+0200},
  archiveprefix = {arXiv},
  author        = {Tzen, Belinda and Raginsky, Maxim},
  biburl        = {https://www.bibsonomy.org/bibtex/24cb8236574c24fea7916ca0b84a95dfe/kirk86},
  description   = {[1903.01608] Theoretical guarantees for sampling and inference in generative models with latent diffusions},
  eprint        = {1903.01608},
  interhash     = {a91fb4e29efee26bbdfeefb3b875c90c},
  intrahash     = {4cb8236574c24fea7916ca0b84a95dfe},
  keywords      = {generative-models sampling stats theory},
  note          = {arXiv preprint},
  timestamp     = {2019-04-20T18:22:28.000+0200},
  title         = {Theoretical Guarantees for Sampling and Inference in Generative Models
                   with Latent Diffusions},
  url           = {https://arxiv.org/abs/1903.01608},
  year          = {2019},
}