We propose a paradigm to deep-learn the ever-expanding databases which have
emerged in mathematical physics and particle phenomenology, as diverse as the
statistics of string vacua or combinatorial and algebraic geometry. As concrete
examples, we establish multi-layer neural networks as both classifiers and
predictors and train them with a host of available data ranging from Calabi-Yau
manifolds and vector bundles, to quiver representations for gauge theories. We
find that even a relatively simple neural network can learn many significant
quantities to astounding accuracy in a matter of minutes and can also predict
hithertofore unencountered results. This paradigm should prove a valuable tool
in various investigations in landscapes in physics as well as pure mathematics.
%0 Generic
%1 He2017DeepLearning
%A He, Yang-Hui
%D 2017
%K statistics
%T Deep-Learning the Landscape
%U http://arxiv.org/abs/1706.02714
%X We propose a paradigm to deep-learn the ever-expanding databases which have
emerged in mathematical physics and particle phenomenology, as diverse as the
statistics of string vacua or combinatorial and algebraic geometry. As concrete
examples, we establish multi-layer neural networks as both classifiers and
predictors and train them with a host of available data ranging from Calabi-Yau
manifolds and vector bundles, to quiver representations for gauge theories. We
find that even a relatively simple neural network can learn many significant
quantities to astounding accuracy in a matter of minutes and can also predict
hithertofore unencountered results. This paradigm should prove a valuable tool
in various investigations in landscapes in physics as well as pure mathematics.
@misc{He2017DeepLearning,
  author        = {He, Yang-Hui},
  title         = {Deep-Learning the {Landscape}},
  year          = 2017,
  month         = jun,
  day           = 8,
  eprint        = {1706.02714},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1706.02714},
  keywords      = {statistics},
  abstract      = {We propose a paradigm to deep-learn the ever-expanding databases which have
emerged in mathematical physics and particle phenomenology, as diverse as the
statistics of string vacua or combinatorial and algebraic geometry. As concrete
examples, we establish multi-layer neural networks as both classifiers and
predictors and train them with a host of available data ranging from Calabi-Yau
manifolds and vector bundles, to quiver representations for gauge theories. We
find that even a relatively simple neural network can learn many significant
quantities to astounding accuracy in a matter of minutes and can also predict
hithertofore unencountered results. This paradigm should prove a valuable tool
in various investigations in landscapes in physics as well as pure mathematics.},
  added-at      = {2019-02-23T22:09:48.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/20bf5385e1c285272643c26ddf437228e/cmcneile},
  citeulike-article-id = {14374368},
  citeulike-linkout-0  = {http://arxiv.org/abs/1706.02714},
  citeulike-linkout-1  = {http://arxiv.org/pdf/1706.02714},
  interhash     = {de776c1297640cc0c804b7e8de856dc7},
  intrahash     = {0bf5385e1c285272643c26ddf437228e},
  posted-at     = {2017-06-12 10:07:18},
  priority      = {2},
  timestamp     = {2019-02-23T22:15:27.000+0100},
}