Deep neural networks have revolutionized many real world applications, due to
their flexibility in data fitting and accurate predictions for unseen data. A
line of research reveals that neural networks can approximate certain classes
of functions with an arbitrary accuracy, while the size of the network scales
exponentially with respect to the data dimension. Empirical results, however,
suggest that networks of moderate size already yield appealing performance. To
explain such a gap, a common belief is that many data sets exhibit low
dimensional structures, and can be modeled as samples near a low dimensional
manifold. In this paper, we prove that neural networks can efficiently
approximate functions supported on low dimensional manifolds. The network size
scales exponentially in the approximation error, with an exponent depending on
the intrinsic dimension of the data and the smoothness of the function. Our
result shows that exploiting low dimensional data structures can greatly
enhance the efficiency in function approximation by neural networks. We also
implement a sub-network that assigns input data to their corresponding local
neighborhoods, which may be of independent interest.
Description
[1908.01842] Efficient Approximation of Deep ReLU Networks for Functions on Low Dimensional Manifolds
%0 Journal Article
%1 chen2019efficient
%A Chen, Minshuo
%A Jiang, Haoming
%A Liao, Wenjing
%A Zhao, Tuo
%D 2019
%K approximate deep-learning manifolds
%T Efficient Approximation of Deep ReLU Networks for Functions on Low
Dimensional Manifolds
%U http://arxiv.org/abs/1908.01842
%X Deep neural networks have revolutionized many real world applications, due to
their flexibility in data fitting and accurate predictions for unseen data. A
line of research reveals that neural networks can approximate certain classes
of functions with an arbitrary accuracy, while the size of the network scales
exponentially with respect to the data dimension. Empirical results, however,
suggest that networks of moderate size already yield appealing performance. To
explain such a gap, a common belief is that many data sets exhibit low
dimensional structures, and can be modeled as samples near a low dimensional
manifold. In this paper, we prove that neural networks can efficiently
approximate functions supported on low dimensional manifolds. The network size
scales exponentially in the approximation error, with an exponent depending on
the intrinsic dimension of the data and the smoothness of the function. Our
result shows that exploiting low dimensional data structures can greatly
enhance the efficiency in function approximation by neural networks. We also
implement a sub-network that assigns input data to their corresponding local
neighborhoods, which may be of independent interest.
@comment{arXiv preprint (no journal), so @misc with eprint fields is used
  instead of @article, which would warn about a missing required `journal`
  field; the arXiv ID previously lived in `note`. Citation key unchanged.}
@misc{chen2019efficient,
  title         = {Efficient Approximation of Deep {ReLU} Networks for Functions on Low
                   Dimensional Manifolds},
  author        = {Chen, Minshuo and Jiang, Haoming and Liao, Wenjing and Zhao, Tuo},
  year          = {2019},
  eprint        = {1908.01842},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1908.01842},
  abstract      = {Deep neural networks have revolutionized many real world applications, due to
their flexibility in data fitting and accurate predictions for unseen data. A
line of research reveals that neural networks can approximate certain classes
of functions with an arbitrary accuracy, while the size of the network scales
exponentially with respect to the data dimension. Empirical results, however,
suggest that networks of moderate size already yield appealing performance. To
explain such a gap, a common belief is that many data sets exhibit low
dimensional structures, and can be modeled as samples near a low dimensional
manifold. In this paper, we prove that neural networks can efficiently
approximate functions supported on low dimensional manifolds. The network size
scales exponentially in the approximation error, with an exponent depending on
the intrinsic dimension of the data and the smoothness of the function. Our
result shows that exploiting low dimensional data structures can greatly
enhance the efficiency in function approximation by neural networks. We also
implement a sub-network that assigns input data to their corresponding local
neighborhoods, which may be of independent interest.},
  keywords      = {approximate deep-learning manifolds},
  added-at      = {2020-01-13T14:09:33.000+0100},
  timestamp     = {2020-01-13T14:09:33.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/282551b534e75aa256820812c48a90b63/kirk86},
  description   = {[1908.01842] Efficient Approximation of Deep ReLU Networks for Functions on Low Dimensional Manifolds},
  interhash     = {fa8ad338039bc7a2974ab1e4695a957b},
  intrahash     = {82551b534e75aa256820812c48a90b63},
}