XGBoost is a scalable ensemble technique based on gradient boosting that has
been demonstrated to be a reliable and efficient machine learning challenge
solver. This work proposes a practical analysis of how this novel technique
works in terms of training speed, generalization performance and parameter
setup. In addition, a comprehensive comparison between XGBoost, random forests
and gradient boosting has been performed using carefully tuned models as well
as using the default settings. The results of this comparison may indicate
that XGBoost is not necessarily the best choice under all circumstances.
Finally, an extensive analysis of the XGBoost parametrization tuning process is carried out.
%0 Generic
%1 bentejac2019comparative
%A Bentéjac, Candice
%A Csörgő, Anna
%A Martínez-Muñoz, Gonzalo
%D 2019
%K machinelearning xgboost
%T A Comparative Analysis of XGBoost
%U http://arxiv.org/abs/1911.01914
%X XGBoost is a scalable ensemble technique based on gradient boosting that has
been demonstrated to be a reliable and efficient machine learning challenge
solver. This work proposes a practical analysis of how this novel technique
works in terms of training speed, generalization performance and parameter
setup. In addition, a comprehensive comparison between XGBoost, random forests
and gradient boosting has been performed using carefully tuned models as well
as using the default settings. The results of this comparison may indicate
that XGBoost is not necessarily the best choice under all circumstances.
Finally, an extensive analysis of the XGBoost parametrization tuning process is carried out.
BibSonomy export of arXiv:1911.01914. arXiv identifier moved from a free-form
note into proper eprint/archiveprefix fields; {XGBoost} braced in the title so
sentence-casing bibliography styles keep its capitalisation. Author names are
UTF-8, which requires Biber (biblatex); convert to LaTeX accent escapes if this
file is ever processed with classic 8-bit BibTeX. (Text outside an @entry is
ignored by BibTeX, so this paragraph is a safe comment.)
@misc{bentejac2019comparative,
  abstract      = {XGBoost is a scalable ensemble technique based on gradient boosting that has
demonstrated to be a reliable and efficient machine learning challenge solver.
This work proposes a practical analysis of how this novel technique works in
terms of training speed, generalization performance and parameter setup. In
addition, a comprehensive comparison between XGBoost, random forests and
gradient boosting has been performed using carefully tuned models as well as
using the default settings. The results of this comparison may indicate that
XGBoost is not necessarily the best choice under all circumstances. Finally an
extensive analysis of XGBoost parametrization tuning process is carried out.},
  added-at      = {2019-11-06T17:56:25.000+0100},
  archiveprefix = {arXiv},
  author        = {Bentéjac, Candice and Csörgő, Anna and Martínez-Muñoz, Gonzalo},
  biburl        = {https://www.bibsonomy.org/bibtex/22f49ac317e4c64818233a84a57674b0e/cpankow},
  description   = {[1911.01914] A Comparative Analysis of XGBoost},
  eprint        = {1911.01914},
  interhash     = {0a0b40e33dc05f8dc3394a7a40a43d34},
  intrahash     = {2f49ac317e4c64818233a84a57674b0e},
  keywords      = {machinelearning xgboost},
  timestamp     = {2019-11-06T17:56:25.000+0100},
  title         = {A Comparative Analysis of {XGBoost}},
  url           = {http://arxiv.org/abs/1911.01914},
  year          = {2019},
}