Motivated by the pursuit of a systematic computational and algorithmic
understanding of Generative Adversarial Networks (GANs), we present a simple
yet unified non-asymptotic local convergence theory for smooth two-player
games, which subsumes several discrete-time gradient-based saddle point
dynamics. The analysis reveals the surprising nature of the off-diagonal
interaction term as both a blessing and a curse. On the one hand, this
interaction term explains the origin of the slow-down effect in the convergence
of Simultaneous Gradient Ascent (SGA) to stable Nash equilibria. On the other
hand, for the unstable equilibria, exponential convergence can be proved,
thanks to the interaction term, for three modified dynamics that have been
proposed to stabilize GAN training: Optimistic Mirror Descent (OMD), Consensus
Optimization (CO), and the Predictive Method (PM). The analysis uncovers the
intimate connections among these stabilizing techniques and provides a
detailed characterization of the choice of learning rate.
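
To make the abstract's dichotomy concrete, here is a minimal numerical sketch (illustrative only, not code from the paper) comparing SGA and OMD on the bilinear game f(x, y) = xy, whose saddle point at the origin is the canonical example of an unstable equilibrium. The learning rate and iteration count are arbitrary choices for the demonstration.

```python
import numpy as np

# Bilinear saddle-point problem f(x, y) = x * y for min_x max_y f.
# Its unique equilibrium (0, 0) is the textbook unstable case where
# plain simultaneous gradient dynamics fail.

def grad(x, y):
    # df/dx = y, df/dy = x for f(x, y) = x * y
    return y, x

eta = 0.1           # learning rate (illustrative choice)
x0, y0 = 1.0, 1.0   # initial point away from the equilibrium

# Simultaneous Gradient Ascent (descent in x, ascent in y):
# the off-diagonal interaction term makes the iterates rotate
# and spiral outward on this game.
xs, ys = x0, y0
for _ in range(100):
    gx, gy = grad(xs, ys)
    xs, ys = xs - eta * gx, ys + eta * gy
print(f"SGA distance from equilibrium: {np.hypot(xs, ys):.3f}")

# Optimistic Mirror Descent: extrapolates with the previous gradient
# (2*g_t - g_{t-1}); the same interaction term now pulls the iterates
# inward, giving local convergence.
xo, yo = x0, y0
pgx, pgy = grad(xo, yo)
for _ in range(100):
    gx, gy = grad(xo, yo)
    xo, yo = xo - eta * (2 * gx - pgx), yo + eta * (2 * gy - pgy)
    pgx, pgy = gx, gy
print(f"OMD distance from equilibrium: {np.hypot(xo, yo):.3f}")
```

With these settings the SGA iterate ends up farther from the origin than it started, while the OMD iterate contracts toward it, matching the blessing-and-curse picture described above.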
Description
Interaction Matters: A Note on Non-asymptotic Local Convergence of Generative Adversarial Networks
@misc{liang2018interaction,
author = {Liang, Tengyuan and Stokes, James},
keywords = {GAN},
note = {arXiv:1802.06132},
title = {Interaction Matters: A Note on Non-asymptotic Local Convergence of Generative Adversarial Networks},
url = {http://arxiv.org/abs/1802.06132},
year = 2018
}