We present new PAC-Bayesian generalisation bounds for learning problems with
unbounded loss functions. This extends the relevance and applicability of the
PAC-Bayes learning framework, where most of the existing literature focuses on
supervised learning problems where the loss function is bounded (typically
assumed to take values in the interval [0;1]). In order to relax this
assumption, we propose a new notion called the special boundedness
condition, which effectively allows the range of the loss to depend on each
predictor. Based on this new notion we derive a novel PAC-Bayesian
generalisation bound for unbounded loss functions, and we instantiate it on a
linear regression problem. To make our theory usable by the largest audience
possible, we include discussions on actual computation, practicality and
limitations of our assumptions.
Description
[2006.07279] PAC-Bayes unleashed: generalisation bounds with unbounded losses
%0 Journal Article
%1 haddouche2020pacbayes
%A Haddouche, Maxime
%A Guedj, Benjamin
%A Rivasplata, Omar
%A Shawe-Taylor, John
%D 2020
%K bayesian bounds generalization objectives readings
%T PAC-Bayes unleashed: generalisation bounds with unbounded losses
%U http://arxiv.org/abs/2006.07279
%X We present new PAC-Bayesian generalisation bounds for learning problems with
unbounded loss functions. This extends the relevance and applicability of the
PAC-Bayes learning framework, where most of the existing literature focuses on
supervised learning problems where the loss function is bounded (typically
assumed to take values in the interval [0;1]). In order to relax this
assumption, we propose a new notion called the special boundedness
condition, which effectively allows the range of the loss to depend on each
predictor. Based on this new notion we derive a novel PAC-Bayesian
generalisation bound for unbounded loss functions, and we instantiate it on a
linear regression problem. To make our theory usable by the largest audience
possible, we include discussions on actual computation, practicality and
limitations of our assumptions.
@article{haddouche2020pacbayes,
  abstract      = {We present new PAC-Bayesian generalisation bounds for learning problems with
unbounded loss functions. This extends the relevance and applicability of the
PAC-Bayes learning framework, where most of the existing literature focuses on
supervised learning problems where the loss function is bounded (typically
assumed to take values in the interval [0;1]). In order to relax this
assumption, we propose a new notion called the \emph{special boundedness
condition}, which effectively allows the range of the loss to depend on each
predictor. Based on this new notion we derive a novel PAC-Bayesian
generalisation bound for unbounded loss functions, and we instantiate it on a
linear regression problem. To make our theory usable by the largest audience
possible, we include discussions on actual computation, practicality and
limitations of our assumptions.},
  added-at      = {2020-06-18T19:37:36.000+0200},
  archiveprefix = {arXiv},
  author        = {Haddouche, Maxime and Guedj, Benjamin and Rivasplata, Omar and Shawe-Taylor, John},
  biburl        = {https://www.bibsonomy.org/bibtex/2ae44b740a928ea8357c73ad520e36ece/kirk86},
  description   = {[2006.07279] PAC-Bayes unleashed: generalisation bounds with unbounded losses},
  eprint        = {2006.07279},
  interhash     = {d3fdae9e117bf1a5103144d8d67275df},
  intrahash     = {ae44b740a928ea8357c73ad520e36ece},
  keywords      = {bayesian bounds generalization objectives readings},
  note          = {cite arxiv:2006.07279},
  timestamp     = {2020-06-18T19:37:36.000+0200},
  title         = {{PAC-Bayes} unleashed: generalisation bounds with unbounded losses},
  url           = {http://arxiv.org/abs/2006.07279},
  year          = {2020}
}