We introduce a novel approach to training generative adversarial networks,
where we train a generator to match a target distribution that converges to the
data distribution at the limit of a perfect discriminator. This objective can
be interpreted as training a generator to produce samples that lie on the
decision boundary of a current discriminator in training at each update, and we
call a GAN trained using this algorithm a boundary-seeking GAN (BS-GAN). This
approach can be used to train a generator with discrete output when the
generator outputs a parametric conditional distribution. We demonstrate the
effectiveness of the proposed algorithm with discrete image data. In contrast
to the proposed algorithm, we observe that the recently proposed Gumbel-Softmax
technique for re-parametrizing the discrete variables does not work for
training a GAN with discrete data. Finally, we notice that the proposed
boundary-seeking algorithm works even with continuous variables, and
demonstrate its effectiveness with two widely used image data sets, SVHN and
CelebA.
%0 Generic
%1 hjelm2017boundaryseeking
%A Hjelm, R Devon
%A Jacob, Athul Paul
%A Che, Tong
%A Cho, Kyunghyun
%A Bengio, Yoshua
%D 2017
%K dro
%T Boundary-Seeking Generative Adversarial Networks
%U http://arxiv.org/abs/1702.08431
%X We introduce a novel approach to training generative adversarial networks,
where we train a generator to match a target distribution that converges to the
data distribution at the limit of a perfect discriminator. This objective can
be interpreted as training a generator to produce samples that lie on the
decision boundary of a current discriminator in training at each update, and we
call a GAN trained using this algorithm a boundary-seeking GAN (BS-GAN). This
approach can be used to train a generator with discrete output when the
generator outputs a parametric conditional distribution. We demonstrate the
effectiveness of the proposed algorithm with discrete image data. In contrast
to the proposed algorithm, we observe that the recently proposed Gumbel-Softmax
technique for re-parametrizing the discrete variables does not work for
training a GAN with discrete data. Finally, we notice that the proposed
boundary-seeking algorithm works even with continuous variables, and
demonstrate its effectiveness with two widely used image data sets, SVHN and
CelebA.
@misc{hjelm2017boundaryseeking,
  abstract      = {We introduce a novel approach to training generative adversarial networks,
where we train a generator to match a target distribution that converges to the
data distribution at the limit of a perfect discriminator. This objective can
be interpreted as training a generator to produce samples that lie on the
decision boundary of a current discriminator in training at each update, and we
call a GAN trained using this algorithm a boundary-seeking GAN (BS-GAN). This
approach can be used to train a generator with discrete output when the
generator outputs a parametric conditional distribution. We demonstrate the
effectiveness of the proposed algorithm with discrete image data. In contrast
to the proposed algorithm, we observe that the recently proposed Gumbel-Softmax
technique for re-parametrizing the discrete variables does not work for
training a GAN with discrete data. Finally, we notice that the proposed
boundary-seeking algorithm works even with continuous variables, and
demonstrate its effectiveness with two widely used image data sets, SVHN and
CelebA.},
  added-at      = {2017-02-28T21:18:45.000+0100},
  archiveprefix = {arXiv},
  author        = {Hjelm, R Devon and Jacob, Athul Paul and Che, Tong and Cho, Kyunghyun and Bengio, Yoshua},
  biburl        = {https://www.bibsonomy.org/bibtex/26b3a31a1555455be3f70f1c203f16f76/jonathandinu},
  description   = {[1702.08431] Boundary-Seeking Generative Adversarial Networks},
  eprint        = {1702.08431},
  interhash     = {74ce108eaea93062b20ba3997657e70f},
  intrahash     = {6b3a31a1555455be3f70f1c203f16f76},
  keywords      = {dro},
  note          = {cite arxiv:1702.08431},
  timestamp     = {2017-02-28T21:18:45.000+0100},
  title         = {Boundary-Seeking Generative Adversarial Networks},
  url           = {http://arxiv.org/abs/1702.08431},
  year          = {2017}
}