Large-scale data analysis poses both statistical and computational problems
which need to be addressed simultaneously. A solution is often straightforward
if the data are homogeneous: one can use classical ideas of subsampling and
mean aggregation to get a computationally efficient solution with acceptable
statistical accuracy, where the aggregation step simply averages the results
obtained on distinct subsets of the data. However, if the data exhibit
inhomogeneities (and typically they do), the same approach will be inadequate,
as it will be unduly influenced by effects that are not persistent across all
the data due to, for example, outliers or time-varying effects. We show that a
tweak to the aggregation step can produce an estimator of effects which are
common to all data, and hence interesting for interpretation and often leading
to better prediction than pooled effects.
Description
Main ideas for large-scale data:
- need efficient algorithms
- data is not i.i.d. -> methods for inhomogeneous data necessary
=> intuition: only keep effects that are similar in all groups (observed and unobserved), but group-specific effects are "averaged away"
%0 Generic
%1 buhlmann2014magging
%A Bühlmann, Peter
%A Meinshausen, Nicolai
%D 2014
%K bagging inhomogeneous library(quadprog) magging stacking subsampling
%T Magging: maximin aggregation for inhomogeneous large-scale data
%U http://arxiv.org/abs/1409.2638
%X Large-scale data analysis poses both statistical and computational problems
which need to be addressed simultaneously. A solution is often straightforward
if the data are homogeneous: one can use classical ideas of subsampling and
mean aggregation to get a computationally efficient solution with acceptable
statistical accuracy, where the aggregation step simply averages the results
obtained on distinct subsets of the data. However, if the data exhibit
inhomogeneities (and typically they do), the same approach will be inadequate,
as it will be unduly influenced by effects that are not persistent across all
the data due to, for example, outliers or time-varying effects. We show that a
tweak to the aggregation step can produce an estimator of effects which are
common to all data, and hence interesting for interpretation and often leading
to better prediction than pooled effects.
@misc{buhlmann2014magging,
  abstract      = {Large-scale data analysis poses both statistical and computational problems
which need to be addressed simultaneously. A solution is often straightforward
if the data are homogeneous: one can use classical ideas of subsampling and
mean aggregation to get a computationally efficient solution with acceptable
statistical accuracy, where the aggregation step simply averages the results
obtained on distinct subsets of the data. However, if the data exhibit
inhomogeneities (and typically they do), the same approach will be inadequate,
as it will be unduly influenced by effects that are not persistent across all
the data due to, for example, outliers or time-varying effects. We show that a
tweak to the aggregation step can produce an estimator of effects which are
common to all data, and hence interesting for interpretation and often leading
to better prediction than pooled effects.},
  added-at      = {2014-09-23T15:28:57.000+0200},
  author        = {B{\"u}hlmann, Peter and Meinshausen, Nicolai},
  biburl        = {https://www.bibsonomy.org/bibtex/2ed71524148a89a19b96fda5fe832d22d/malteschierholz},
  description   = {Main ideas for large-scale data:
- need efficient algorithms
- data is not i.i.d. -> methods for inhomogeneous data necessary
=> intuition: only keep effects that are similar in all groups (observed and unobserved), but group-specific effects are "averaged away"},
  eprint        = {1409.2638},
  archiveprefix = {arXiv},
  interhash     = {bbf59ba0b1f9dfacf5d17c185f2c5645},
  intrahash     = {ed71524148a89a19b96fda5fe832d22d},
  keywords      = {bagging inhomogeneous library(quadprog) magging stacking subsampling},
  note          = {15 pages, 3 figures},
  timestamp     = {2014-09-23T15:28:57.000+0200},
  title         = {Magging: Maximin Aggregation for Inhomogeneous Large-Scale Data},
  url           = {http://arxiv.org/abs/1409.2638},
  year          = {2014}
}