This paper is about variable selection with the random forests algorithm in
presence of correlated predictors. In high-dimensional regression or
classification frameworks, variable selection is a difficult task, that becomes
even more challenging in the presence of highly correlated predictors. Firstly
we provide a theoretical study of the permutation importance measure for an
additive regression model. This allows us to describe how the correlation
between predictors impacts the permutation importance. Our results motivate the
use of the Recursive Feature Elimination (RFE) algorithm for variable selection
in this context. This algorithm recursively eliminates the variables using
permutation importance measure as a ranking criterion. Next various simulation
experiments illustrate the efficiency of the RFE algorithm for selecting a
small number of variables together with a good prediction error. Finally, this
selection algorithm is tested on the Landsat Satellite data from the UCI
Machine Learning Repository.
Description
Correlation and variable importance in random forests
%0 Generic
%1 gregorutti2013correlation
%A Gregorutti, Baptiste
%A Michel, Bertrand
%A Saint-Pierre, Philippe
%D 2013
%K machine-learning
%R 10.1007/s11222-016-9646-1
%T Correlation and variable importance in random forests
%U http://arxiv.org/abs/1310.5726
%X This paper is about variable selection with the random forests algorithm in
presence of correlated predictors. In high-dimensional regression or
classification frameworks, variable selection is a difficult task, that becomes
even more challenging in the presence of highly correlated predictors. Firstly
we provide a theoretical study of the permutation importance measure for an
additive regression model. This allows us to describe how the correlation
between predictors impacts the permutation importance. Our results motivate the
use of the Recursive Feature Elimination (RFE) algorithm for variable selection
in this context. This algorithm recursively eliminates the variables using
permutation importance measure as a ranking criterion. Next various simulation
experiments illustrate the efficiency of the RFE algorithm for selecting a
small number of variables together with a good prediction error. Finally, this
selection algorithm is tested on the Landsat Satellite data from the UCI
Machine Learning Repository.
% arXiv preprint (1310.5726). NOTE(review): the DOI below appears to resolve to the
% later published journal version (Statistics and Computing) — confirm before citing
% one or the other. The eprint/archiveprefix fields replace the former
% "note = {cite arxiv:1310.5726}" free-text workaround.
@misc{gregorutti2013correlation,
  abstract      = {This paper is about variable selection with the random forests algorithm in
presence of correlated predictors. In high-dimensional regression or
classification frameworks, variable selection is a difficult task, that becomes
even more challenging in the presence of highly correlated predictors. Firstly
we provide a theoretical study of the permutation importance measure for an
additive regression model. This allows us to describe how the correlation
between predictors impacts the permutation importance. Our results motivate the
use of the Recursive Feature Elimination (RFE) algorithm for variable selection
in this context. This algorithm recursively eliminates the variables using
permutation importance measure as a ranking criterion. Next various simulation
experiments illustrate the efficiency of the RFE algorithm for selecting a
small number of variables together with a good prediction error. Finally, this
selection algorithm is tested on the Landsat Satellite data from the UCI
Machine Learning Repository.},
  added-at      = {2020-02-16T00:34:53.000+0100},
  archiveprefix = {arXiv},
  author        = {Gregorutti, Baptiste and Michel, Bertrand and Saint-Pierre, Philippe},
  biburl        = {https://www.bibsonomy.org/bibtex/25090e0b7090463b77b0c71dd4b23b82c/stdiff},
  description   = {Correlation and variable importance in random forests},
  doi           = {10.1007/s11222-016-9646-1},
  eprint        = {1310.5726},
  interhash     = {0a13051b5241d04af64661e25ae2e0eb},
  intrahash     = {5090e0b7090463b77b0c71dd4b23b82c},
  keywords      = {machine-learning},
  timestamp     = {2020-02-16T00:34:53.000+0100},
  title         = {Correlation and variable importance in random forests},
  url           = {http://arxiv.org/abs/1310.5726},
  year          = {2013},
}