This paper explores the generalization loss of linear regression in variably
parameterized families of models, both under-parameterized and
over-parameterized. We show that the generalization curve can have an arbitrary
number of peaks, and moreover, locations of those peaks can be explicitly
controlled.
Our results highlight the fact that both classical U-shaped generalization
curve and the recently observed double descent curve are not intrinsic
properties of the model family. Instead, their emergence is due to the
interaction between the properties of the data and the inductive biases of
learning algorithms.
Description
[2008.01036] Multiple Descent: Design Your Own Generalization Curve
%0 Journal Article
%1 chen2020multiple
%A Chen, Lin
%A Min, Yifei
%A Belkin, Mikhail
%A Karbasi, Amin
%D 2020
%K bounds generalization optimization readings
%T Multiple Descent: Design Your Own Generalization Curve
%U http://arxiv.org/abs/2008.01036
%X This paper explores the generalization loss of linear regression in variably
parameterized families of models, both under-parameterized and
over-parameterized. We show that the generalization curve can have an arbitrary
number of peaks, and moreover, locations of those peaks can be explicitly
controlled.
Our results highlight the fact that both classical U-shaped generalization
curve and the recently observed double descent curve are not intrinsic
properties of the model family. Instead, their emergence is due to the
interaction between the properties of the data and the inductive biases of
learning algorithms.
@misc{chen2020multiple,
  abstract      = {This paper explores the generalization loss of linear regression in variably
parameterized families of models, both under-parameterized and
over-parameterized. We show that the generalization curve can have an arbitrary
number of peaks, and moreover, locations of those peaks can be explicitly
controlled.
Our results highlight the fact that both classical U-shaped generalization
curve and the recently observed double descent curve are not intrinsic
properties of the model family. Instead, their emergence is due to the
interaction between the properties of the data and the inductive biases of
learning algorithms.},
  added-at      = {2020-08-04T11:43:09.000+0200},
  archiveprefix = {arXiv},
  author        = {Chen, Lin and Min, Yifei and Belkin, Mikhail and Karbasi, Amin},
  biburl        = {https://www.bibsonomy.org/bibtex/201cfa6c43ae103a440e7f58a799369a0/kirk86},
  description   = {[2008.01036] Multiple Descent: Design Your Own Generalization Curve},
  eprint        = {2008.01036},
  interhash     = {48dec3bfbd5aad17d0a6413a716dbb1b},
  intrahash     = {01cfa6c43ae103a440e7f58a799369a0},
  keywords      = {bounds generalization optimization readings},
  note          = {cite arxiv:2008.01036},
  timestamp     = {2020-08-04T11:43:09.000+0200},
  title         = {Multiple Descent: Design Your Own Generalization Curve},
  url           = {http://arxiv.org/abs/2008.01036},
  year          = {2020}
}