T. Wong. (2017). arXiv:1712.03610. Comment: 28 pages.
Abstract
Divergences, also known as contrast functions, are distance-like quantities
defined on manifolds of non-negative or probability measures and they arise in
various theoretical and applied problems. Using ideas in optimal transport, we
introduce and study a parameterized family of $L^{(\pm \alpha)}$-divergences
which includes the Bregman divergence corresponding to the Euclidean quadratic
cost, and the $L$-divergence introduced by Pal and Wong in connection with
portfolio theory and a logarithmic cost function. Using this unified framework
which elucidates the arguments in our previous work, we prove that these
divergences induce geometric structures that are dually projectively flat with
constant curvatures, and the generalized Pythagorean theorem holds true.
Conversely, we show that if a statistical manifold is dually projectively flat
with constant curvature $\pm \alpha$ with $\alpha > 0$, then it is locally
induced by an $L^{(\mp \alpha)}$-divergence. We define in this context a
canonical divergence which extends the one for dually flat manifolds. Finally,
we study generalizations of exponential family and show that the $L^{(\pm
\alpha)}$-divergence of the corresponding potential functions gives the
Rényi divergence.
%0 Journal Article
%1 wong2017statistical
%A Wong, Ting-Kam Leonard
%D 2017
%K divergence
%T Statistical manifolds from optimal transport
%U http://arxiv.org/abs/1712.03610
%X Divergences, also known as contrast functions, are distance-like quantities
defined on manifolds of non-negative or probability measures and they arise in
various theoretical and applied problems. Using ideas in optimal transport, we
introduce and study a parameterized family of $L^{(\pm \alpha)}$-divergences
which includes the Bregman divergence corresponding to the Euclidean quadratic
cost, and the $L$-divergence introduced by Pal and Wong in connection with
portfolio theory and a logarithmic cost function. Using this unified framework
which elucidates the arguments in our previous work, we prove that these
divergences induce geometric structures that are dually projectively flat with
constant curvatures, and the generalized Pythagorean theorem holds true.
Conversely, we show that if a statistical manifold is dually projectively flat
with constant curvature $\pm \alpha$ with $\alpha > 0$, then it is locally
induced by an $L^{(\mp \alpha)}$-divergence. We define in this context a
canonical divergence which extends the one for dually flat manifolds. Finally,
we study generalizations of exponential family and show that the $L^{(\pm
\alpha)}$-divergence of the corresponding potential functions gives the
Rényi divergence.
@article{wong2017statistical,
  abstract      = {Divergences, also known as contrast functions, are distance-like quantities
defined on manifolds of non-negative or probability measures and they arise in
various theoretical and applied problems. Using ideas in optimal transport, we
introduce and study a parameterized family of $L^{(\pm \alpha)}$-divergences
which includes the Bregman divergence corresponding to the Euclidean quadratic
cost, and the $L$-divergence introduced by Pal and Wong in connection with
portfolio theory and a logarithmic cost function. Using this unified framework
which elucidates the arguments in our previous work, we prove that these
divergences induce geometric structures that are dually projectively flat with
constant curvatures, and the generalized Pythagorean theorem holds true.
Conversely, we show that if a statistical manifold is dually projectively flat
with constant curvature $\pm \alpha$ with $\alpha > 0$, then it is locally
induced by an $L^{(\mp \alpha)}$-divergence. We define in this context a
canonical divergence which extends the one for dually flat manifolds. Finally,
we study generalizations of exponential family and show that the $L^{(\pm
\alpha)}$-divergence of the corresponding potential functions gives the
R{\'e}nyi divergence.},
  added-at      = {2017-12-13T19:22:31.000+0100},
  archiveprefix = {arXiv},
  author        = {Wong, Ting-Kam Leonard},
  biburl        = {https://www.bibsonomy.org/bibtex/24851f2cb966da638d0d0a20fa17dc280/claired},
  description   = {Statistical manifolds from optimal transport},
  eprint        = {1712.03610},
  interhash     = {425055e15c0ef4449f1be7ca141279b4},
  intrahash     = {4851f2cb966da638d0d0a20fa17dc280},
  keywords      = {divergence},
  note          = {28 pages},
  timestamp     = {2017-12-13T19:22:31.000+0100},
  title         = {Statistical manifolds from optimal transport},
  url           = {http://arxiv.org/abs/1712.03610},
  year          = {2017},
}