L. Breiman. Machine Learning, 24(2):123--140 (1996).
Abstract
Bagging predictors is a method for generating multiple versions of a predictor and using these to get an aggregated predictor. The aggregation averages over the versions when predicting a numerical outcome and does a plurality vote when predicting a class. The multiple versions are formed by making bootstrap replicates of the learning set and using these as new learning sets. Tests on real and simulated data sets using classification and regression trees and subset selection in linear regression show that bagging can give substantial gains in accuracy. The vital element is the instability of the prediction method. If perturbing the learning set can cause significant changes in the predictor constructed, then bagging can improve accuracy.
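The abstract fully specifies the procedure: draw bootstrap replicates of the learning set, fit the base predictor to each replicate, then average the predictions for a numerical outcome or take a plurality vote for a class. The following is a minimal sketch of the regression case, not Breiman's original implementation; it assumes scikit-learn's DecisionTreeRegressor as the unstable base predictor, and the number of replicates B and the random seed are illustrative choices.

import numpy as np
from sklearn.tree import DecisionTreeRegressor

def bagged_predict(X_train, y_train, X_test, B=50, seed=0):
    """Bagging sketch: average B trees fit on bootstrap replicates of the learning set."""
    rng = np.random.default_rng(seed)
    n = len(X_train)
    per_replicate = []
    for _ in range(B):
        # Bootstrap replicate: n cases drawn with replacement from the learning set.
        idx = rng.integers(0, n, size=n)
        tree = DecisionTreeRegressor().fit(X_train[idx], y_train[idx])
        per_replicate.append(tree.predict(X_test))
    # Aggregate by averaging for a numerical outcome (for classification,
    # replace this with a plurality vote over the predicted class labels).
    return np.mean(per_replicate, axis=0)

Because every tree is grown on a perturbed version of the learning set, an unstable predictor yields noticeably different versions across replicates, and averaging them is what produces the accuracy gains the abstract describes; for a stable predictor the replicates are nearly identical and bagging changes little.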
%0 Journal Article
%1 breiman1996bagging
%A Breiman, Leo
%D 1996
%J Machine Learning
%K clustering, machine-learning
%N 2
%P 123--140
%T Bagging Predictors
%U http://citeseer.ist.psu.edu/breiman96bagging.html
%V 24
%X Bagging predictors is a method for generating multiple versions of a predictor and using these to get an aggregated predictor. The aggregation averages over the versions when predicting a numerical outcome and does a plurality vote when predicting a class. The multiple versions are formed by making bootstrap replicates of the learning set and using these as new learning sets. Tests on real and simulated data sets using classification and regression trees and subset selection in linear regression show that bagging can give substantial gains in accuracy. The vital element is the instability of the prediction method. If perturbing the learning set can cause significant changes in the predictor constructed, then bagging can improve accuracy.
@article{breiman1996bagging,
abstract = {Bagging predictors is a method for generating multiple versions of a predictor and using these to get an aggregated predictor. The aggregation averages over the versions when predicting a numerical outcome and does a plurality vote when predicting a class. The multiple versions are formed by making bootstrap replicates of the learning set and using these as new learning sets. Tests on real and simulated data sets using classification and regression trees and subset selection in linear regression show that bagging can give substantial gains in accuracy. The vital element is the instability of the prediction method. If perturbing the learning set can cause significant changes in the predictor constructed, then bagging can improve accuracy.},
author = {Breiman, Leo},
journal = {Machine Learning},
keywords = {clustering, machine-learning},
number = 2,
pages = {123--140},
title = {Bagging Predictors},
url = {http://citeseer.ist.psu.edu/breiman96bagging.html},
volume = 24,
year = 1996
}