We propose a Laplace approximation that creates a stochastic unit from any
smooth monotonic activation function, using only Gaussian noise. This paper
investigates the application of this stochastic approximation in training a
family of Restricted Boltzmann Machines (RBM) that are closely linked to
Bregman divergences. This family, which we call exponential family RBM
(Exp-RBM), is a subset of the exponential family Harmoniums that expresses
family members through a choice of smooth monotonic non-linearity for each
neuron. Using contrastive divergence along with our Gaussian approximation, we
show that Exp-RBM can learn useful representations using novel stochastic
units.
%0 Generic
%1 ravanbakhsh2015stochastic
%A Ravanbakhsh, Siamak
%A Poczos, Barnabas
%A Schneider, Jeff
%A Schuurmans, Dale
%A Greiner, Russell
%D 2015
%K acreuser deeplearning tutorial
%T Stochastic Neural Networks with Monotonic Activation Functions
%U http://arxiv.org/abs/1601.00034
%X We propose a Laplace approximation that creates a stochastic unit from any
smooth monotonic activation function, using only Gaussian noise. This paper
investigates the application of this stochastic approximation in training a
family of Restricted Boltzmann Machines (RBM) that are closely linked to
Bregman divergences. This family, which we call exponential family RBM
(Exp-RBM), is a subset of the exponential family Harmoniums that expresses
family members through a choice of smooth monotonic non-linearity for each
neuron. Using contrastive divergence along with our Gaussian approximation, we
show that Exp-RBM can learn useful representations using novel stochastic
units.
@misc{ravanbakhsh2015stochastic,
  abstract      = {We propose a Laplace approximation that creates a stochastic unit from any
smooth monotonic activation function, using only Gaussian noise. This paper
investigates the application of this stochastic approximation in training a
family of Restricted Boltzmann Machines (RBM) that are closely linked to
Bregman divergences. This family, that we call exponential family RBM
(Exp-RBM), is a subset of the exponential family Harmoniums that expresses
family members through a choice of smooth monotonic non-linearity for each
neuron. Using contrastive divergence along with our Gaussian approximation, we
show that Exp-RBM can learn useful representations using novel stochastic
units.},
  added-at      = {2016-04-15T06:55:03.000+0200},
  archiveprefix = {arXiv},
  author        = {Ravanbakhsh, Siamak and P{\'o}czos, Barnab{\'a}s and Schneider, Jeff and Schuurmans, Dale and Greiner, Russell},
  biburl        = {https://www.bibsonomy.org/bibtex/281769fd22f517fb3f0bd34e1f217987b/pixor},
  description   = {1601.00034v2.pdf},
  eprint        = {1601.00034},
  interhash     = {7aa66d6b80a277112d6b6b75e742efdd},
  internal-note = {review: cleaned auto-exported note field; arXiv ID moved to eprint/archiveprefix},
  intrahash     = {81769fd22f517fb3f0bd34e1f217987b},
  keywords      = {acreuser deeplearning tutorial},
  note          = {AISTATS 2016},
  timestamp     = {2016-04-15T06:55:03.000+0200},
  title         = {Stochastic Neural Networks with Monotonic Activation Functions},
  url           = {http://arxiv.org/abs/1601.00034},
  year          = {2015},
}