We consider the problem of selecting a model having the best predictive ability among a class of linear models. The popular leave-one-out cross-validation method, which is asymptotically equivalent to many other model selection methods such as the Akaike information criterion (AIC), the Cp, and the bootstrap, is asymptotically inconsistent in the sense that the probability of selecting the model with the best predictive ability does not converge to 1 as the total number of observations n tends to infinity. We show that the inconsistency of the leave-one-out cross-validation can be rectified by using a leave-nv-out cross-validation with nv, the number of observations reserved for validation, satisfying nv/n tending to 1 as n tends to infinity. This is a somewhat shocking discovery, because nv/n tending to 1 is totally opposite to the popular leave-one-out recipe in cross-validation. Motivations, justifications, and discussions of some practical aspects of the use of the leave-nv-out cross-validation method are provided, and results from a simulation study are presented.
%0 Journal Article
%1 shao_linear_1993
%A Shao, Jun
%D 1993
%J Journal of the American Statistical Association
%K AIC, consistency, cross-validation, model selection
%N 422
%P 486--494
%R 10.1080/01621459.1993.10476299
%T Linear Model Selection by Cross-Validation
%V 88
%X We consider the problem of selecting a model having the best predictive ability among a class of linear models. The popular leave-one-out cross-validation method, which is asymptotically equivalent to many other model selection methods such as the Akaike information criterion (AIC), the Cp, and the bootstrap, is asymptotically inconsistent in the sense that the probability of selecting the model with the best predictive ability does not converge to 1 as the total number of observations n tends to infinity. We show that the inconsistency of the leave-one-out cross-validation can be rectified by using a leave-nv-out cross-validation with nv, the number of observations reserved for validation, satisfying nv/n tending to 1 as n tends to infinity. This is a somewhat shocking discovery, because nv/n tending to 1 is totally opposite to the popular leave-one-out recipe in cross-validation. Motivations, justifications, and discussions of some practical aspects of the use of the leave-nv-out cross-validation method are provided, and results from a simulation study are presented.
@article{shao_linear_1993,
  abstract  = {We consider the problem of selecting a model having the best predictive ability among a class of linear models. The popular leave-one-out cross-validation method, which is asymptotically equivalent to many other model selection methods such as the Akaike information criterion (AIC), the Cp, and the bootstrap, is asymptotically inconsistent in the sense that the probability of selecting the model with the best predictive ability does not converge to 1 as the total number of observations {$n \to \infty$}. We show that the inconsistency of the leave-one-out cross-validation can be rectified by using a leave-{$n_v$}-out cross-validation with {$n_v$}, the number of observations reserved for validation, satisfying {$n_v/n \to 1$} as {$n \to \infty$}. This is a somewhat shocking discovery, because {$n_v/n \to 1$} is totally opposite to the popular leave-one-out recipe in cross-validation. Motivations, justifications, and discussions of some practical aspects of the use of the leave-{$n_v$}-out cross-validation method are provided, and results from a simulation study are presented.},
  added-at  = {2017-01-09T13:57:26.000+0100},
  author    = {Shao, Jun},
  biburl    = {https://www.bibsonomy.org/bibtex/28aa064690519ec7b1d91538af6af683b/yourwelcome},
  doi       = {10.1080/01621459.1993.10476299},
  interhash = {9bc26d1cc8c1ab8ec873bf52a4b933a5},
  intrahash = {8aa064690519ec7b1d91538af6af683b},
  journal   = {Journal of the American Statistical Association},
  keywords  = {AIC, consistency, cross-validation, model selection},
  number    = {422},
  pages     = {486--494},
  timestamp = {2017-01-09T14:01:11.000+0100},
  title     = {Linear Model Selection by Cross-Validation},
  volume    = {88},
  year      = {1993},
}