Neural networks are known to be a class of highly expressive functions able
to fit even random input-output mappings with $100\%$ accuracy. In this work,
we present properties of neural networks that complement this aspect of
expressivity. By using tools from Fourier analysis, we show that deep ReLU
networks are biased towards low frequency functions, meaning that they cannot
have local fluctuations without affecting their global behavior. Intuitively,
this property is in line with the observation that over-parameterized networks
find simple patterns that generalize across data samples. We also investigate
how the shape of the data manifold affects expressivity by showing evidence
that learning high frequencies gets easier with increasing manifold
complexity, and present a theoretical understanding of this behavior. Finally,
we study the robustness of the frequency components with respect to parameter
perturbation, to develop the intuition that the parameters must be finely tuned
to express high frequency functions.
%0 Generic
%1 rahaman2018spectral
%A Rahaman, Nasim
%A Baratin, Aristide
%A Arpit, Devansh
%A Draxler, Felix
%A Lin, Min
%A Hamprecht, Fred A.
%A Bengio, Yoshua
%A Courville, Aaron
%D 2018
%K ai math relu spectral
%T On the Spectral Bias of Neural Networks
%U http://arxiv.org/abs/1806.08734
%X Neural networks are known to be a class of highly expressive functions able
to fit even random input-output mappings with $100\%$ accuracy. In this work,
we present properties of neural networks that complement this aspect of
expressivity. By using tools from Fourier analysis, we show that deep ReLU
networks are biased towards low frequency functions, meaning that they cannot
have local fluctuations without affecting their global behavior. Intuitively,
this property is in line with the observation that over-parameterized networks
find simple patterns that generalize across data samples. We also investigate
how the shape of the data manifold affects expressivity by showing evidence
that learning high frequencies gets easier with increasing manifold
complexity, and present a theoretical understanding of this behavior. Finally,
we study the robustness of the frequency components with respect to parameter
perturbation, to develop the intuition that the parameters must be finely tuned
to express high frequency functions.
@misc{rahaman2018spectral,
  author        = {Rahaman, Nasim and Baratin, Aristide and Arpit, Devansh and Draxler, Felix and Lin, Min and Hamprecht, Fred A. and Bengio, Yoshua and Courville, Aaron},
  title         = {On the Spectral Bias of Neural Networks},
  year          = {2018},
  % arXiv identifier moved out of the garbled auto-exported note
  % ("cite arxiv:1806.08734Comment: 23 pages") into the standard
  % eprint/archiveprefix fields so styles render it properly.
  eprint        = {1806.08734},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1806.08734},
  note          = {23 pages},
  abstract      = {Neural networks are known to be a class of highly expressive functions able
to fit even random input-output mappings with $100\%$ accuracy. In this work,
we present properties of neural networks that complement this aspect of
expressivity. By using tools from Fourier analysis, we show that deep ReLU
networks are biased towards low frequency functions, meaning that they cannot
have local fluctuations without affecting their global behavior. Intuitively,
this property is in line with the observation that over-parameterized networks
find simple patterns that generalize across data samples. We also investigate
how the shape of the data manifold affects expressivity by showing evidence
that learning high frequencies gets \emph{easier} with increasing manifold
complexity, and present a theoretical understanding of this behavior. Finally,
we study the robustness of the frequency components with respect to parameter
perturbation, to develop the intuition that the parameters must be finely tuned
to express high frequency functions.},
  keywords      = {ai math relu spectral},
  % BibSonomy bookkeeping fields below are ignored by BibTeX/Biber styles
  % but preserved so the record can be traced back to its source.
  added-at      = {2021-02-03T09:54:02.000+0100},
  timestamp     = {2021-02-03T09:56:10.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/24297aafa6045daa9b53b66e9934d5f6d/louissf},
  description   = {On the Spectral Bias of Neural Networks},
  interhash     = {ed126edbcd89f7ae987f99aaf8d8133f},
  intrahash     = {4297aafa6045daa9b53b66e9934d5f6d},
}