O. Ludwig. Deep Learning with Eigenvalue Decay Regularizer. arXiv:1604.06985, April 2016.
Abstract
This paper extends our previous work on regularization of neural networks
using Eigenvalue Decay by employing a soft approximation of the dominant
eigenvalue, which makes the regularizer differentiable with respect to the
synaptic weights and therefore compatible with back-propagation, a
prerequisite for deep learning. Moreover, we extend our previous theoretical
analysis to deep neural networks and multiclass classification problems. Our
method is implemented as an additional regularizer in Keras, a modular
neural-network library written in Python, and evaluated on three benchmark
data sets: Reuters Newswire Topics Classification, the IMDB database for
binary sentiment classification, and the MNIST database of handwritten digits.
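To make the idea in the abstract concrete, here is a minimal sketch of what such a regularizer could look like in today's tf.keras API. It is not the paper's original implementation: it assumes a power-iteration estimate of the dominant eigenvalue of W^T W as the "soft approximation" (the paper's exact approximation may differ), and the class and parameter names (EigenvalueDecay, coef, iterations) are hypothetical.

    import tensorflow as tf

    class EigenvalueDecay(tf.keras.regularizers.Regularizer):
        """Penalty on an estimate of the dominant eigenvalue of W^T W
        (i.e. the squared spectral norm of a 2-D weight matrix W).
        Illustrative sketch only, not the paper's original code."""

        def __init__(self, coef=0.1, iterations=9):
            self.coef = coef              # regularization strength (hypothetical name)
            self.iterations = iterations  # power-iteration steps (hypothetical name)

        def __call__(self, w):
            # Power iteration on W^T W: every step is differentiable, so the
            # penalty admits gradients with respect to the synaptic weights
            # and can be trained with ordinary back-propagation.
            wtw = tf.matmul(w, w, transpose_a=True)
            v = tf.ones([tf.shape(wtw)[0], 1], dtype=w.dtype)
            for _ in range(self.iterations):
                v = tf.matmul(wtw, v)
                v = v / (tf.norm(v) + 1e-12)
            # Rayleigh quotient v^T (W^T W) v of the normalized iterate
            # approximates the dominant eigenvalue.
            dominant = tf.squeeze(tf.matmul(tf.matmul(wtw, v), v, transpose_a=True))
            return self.coef * dominant

        def get_config(self):
            return {"coef": self.coef, "iterations": self.iterations}

Attached to a layer in the usual Keras way, e.g. tf.keras.layers.Dense(64, kernel_regularizer=EigenvalueDecay(coef=0.1)), the penalty is added to the training loss, and gradients flow through the power-iteration loop to the weights, which is precisely what the differentiable soft approximation is meant to enable.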
@article{ludwig2016learning,
  author  = {Ludwig, Oswaldo},
  title   = {Deep Learning with Eigenvalue Decay Regularizer},
  journal = {arXiv preprint arXiv:1604.06985},
  month   = apr,
  year    = {2016},
  url     = {http://arxiv.org/abs/1604.06985}
}