We suggest a novel approach for estimating the posterior distribution
of the weights of a neural network, using an online version of the variational
Bayes method. Having a confidence measure for the weights allows us to combat
several shortcomings of neural networks, such as their parameter redundancy
and their notorious vulnerability to changes in the input distribution
("catastrophic forgetting"). Specifically, we show that this approach helps
alleviate the catastrophic forgetting phenomenon - even without knowledge
of when tasks are switched. Furthermore, it improves the robustness of
the network to weight pruning - even without re-training.
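For concreteness, below is a minimal NumPy sketch of the two generic ingredients the abstract refers to: a mean-field Gaussian variational posterior over the weights, trained with Monte Carlo reparameterization gradients, and signal-to-noise-ratio pruning that uses the learned per-weight variance as a confidence measure. This is a generic variational Bayes illustration, not the paper's specific BGD update rule; the toy task, hyperparameters, and the per-datum KL weighting are all assumptions made for the example.

    import numpy as np

    rng = np.random.default_rng(0)

    # Toy regression task: y = x @ w_true + noise.
    n, d = 256, 8
    x = rng.normal(size=(n, d))
    w_true = rng.normal(size=d)
    y = x @ w_true + 0.1 * rng.normal(size=n)

    # Mean-field Gaussian posterior q(w) = N(mu, diag(sigma^2)),
    # with sigma = softplus(rho) to keep it positive.
    mu = np.zeros(d)
    rho = np.full(d, -3.0)

    def softplus(z):
        return np.log1p(np.exp(z))

    def sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    lr, mc_samples = 0.05, 8
    kl_weight = 1.0 / n  # per-datum KL weighting, a common minibatch convention
    for step in range(500):
        sigma = softplus(rho)
        g_mu, g_rho = np.zeros(d), np.zeros(d)
        for _ in range(mc_samples):
            eps = rng.normal(size=d)
            w = mu + sigma * eps               # reparameterization trick
            g_w = x.T @ (x @ w - y) / n        # grad of average Gaussian NLL wrt w
            g_mu += g_w
            g_rho += g_w * eps * sigmoid(rho)  # chain rule through softplus
        g_mu /= mc_samples
        g_rho /= mc_samples
        # Gradient of KL(q || N(0, I)) with respect to mu and rho.
        g_mu += kl_weight * mu
        g_rho += kl_weight * (sigma - 1.0 / sigma) * sigmoid(rho)
        mu -= lr * g_mu
        rho -= lr * g_rho

    # The learned sigma is a per-weight confidence measure: prune the
    # lowest signal-to-noise-ratio weights, with no retraining afterwards.
    sigma = softplus(rho)
    snr = np.abs(mu) / sigma
    mu_pruned = np.where(snr > np.quantile(snr, 0.25), mu, 0.0)
    print("MSE full vs pruned:",
          np.mean((x @ mu - y) ** 2),
          np.mean((x @ mu_pruned - y) ** 2))

In this miniature setting, zeroing the lowest-SNR quarter of the weights usually degrades the fit only slightly, which mirrors the pruning-robustness effect the abstract describes; the continual-learning behavior would additionally require feeding tasks sequentially and carrying the learned posterior forward as the next prior.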
Description
Bayesian Gradient Descent: Online Variational Bayes Learning with Increased Robustness to Catastrophic Forgetting and Weight Pruning
@misc{zeno2018bayesian,
abstract = {We suggest a novel approach for estimating the posterior distribution
of the weights of a neural network, using an online version of the variational
Bayes method. Having a confidence measure for the weights allows us to combat
several shortcomings of neural networks, such as their parameter redundancy
and their notorious vulnerability to changes in the input distribution
("catastrophic forgetting"). Specifically, we show that this approach helps
alleviate the catastrophic forgetting phenomenon - even without knowledge
of when tasks are switched. Furthermore, it improves the robustness of
the network to weight pruning - even without re-training.},
author = {Zeno, Chen and Golan, Itay and Hoffer, Elad and Soudry, Daniel},
keywords = {bayesian neural-network},
note = {cite arxiv:1803.10123},
title = {Bayesian Gradient Descent: Online Variational Bayes Learning with
Increased Robustness to Catastrophic Forgetting and Weight Pruning},
url = {http://arxiv.org/abs/1803.10123},
year = 2018
}