We present an extensive study of generalization for data-dependent hypothesis
sets. We give a general learning guarantee for data-dependent hypothesis sets
based on a notion of transductive Rademacher complexity. Our main results are
two generalization bounds for data-dependent hypothesis sets expressed in terms
of a notion of hypothesis set stability and a notion of Rademacher complexity
for data-dependent hypothesis sets that we introduce. These bounds admit as
special cases both standard Rademacher complexity bounds and
algorithm-dependent uniform stability bounds. We also illustrate the use of
these learning bounds in the analysis of several scenarios.
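For orientation, the two special cases named in the abstract can be written out explicitly. This is a minimal LaTeX sketch of the standard results, not the paper's own notation: $S$ is a sample of size $m$, $\ell$ a loss bounded in $[0,1]$, $\mathfrak{R}_m$ the Rademacher complexity of the associated loss class, $\delta$ a confidence parameter, and $\beta$ a stability parameter.

% Standard Rademacher complexity bound: with probability at least 1 - \delta
% over the draw of S, every h in a fixed (data-independent) hypothesis set H satisfies
\[
  R(h) \;\le\; \widehat{R}_S(h) \;+\; 2\,\mathfrak{R}_m(H) \;+\; \sqrt{\frac{\log(1/\delta)}{2m}} .
\]

% Uniform stability: an algorithm A is \beta-uniformly stable if, for all samples
% S, S' differing in a single point and all test points z,
\[
  \bigl|\ell(A(S), z) - \ell(A(S'), z)\bigr| \;\le\; \beta .
\]

The paper's bounds recover guarantees of both forms when the data-dependent hypothesis set is either fixed in advance or reduced to a single algorithm output.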
@article{foster2019hypothesis,
author = {Foster, Dylan J. and Greenberg, Spencer and Kale, Satyen and Luo, Haipeng and Mohri, Mehryar and Sridharan, Karthik},
keywords = {bounds deep-learning generalization theory},
note = {cite arxiv:1904.04755},
title = {Hypothesis Set Stability and Generalization},
url = {http://arxiv.org/abs/1904.04755},
year = 2019
}