In our work, we propose a novel formulation for supervised dimensionality
reduction based on a nonlinear dependency criterion called Statistical Distance
Correlation, Székely et al. (2007). We propose an objective which is free of
distributional assumptions on regression variables and regression model
assumptions. Our proposed formulation is based on learning a low-dimensional
feature representation $z$, which maximizes the squared sum of
Distance Correlations between low dimensional features $z$ and
response $y$, and also between features $z$ and covariates
$x$. We propose a novel algorithm to optimize our proposed objective
using the Generalized Minimization Maximization method of Parizi et al.
(2015). We show superior empirical results on multiple datasets proving the
effectiveness of our proposed approach over several relevant state-of-the-art
supervised dimensionality reduction methods.
Description
Supervised Dimensionality Reduction via Distance Correlation
Maximization
%0 Journal Article
%1 sdrdcmax
%A Vepakomma, Praneeth
%A Tonde, Chetan
%A Elgammal, Ahmed
%D 2016
%K correlation dimensionality distance reduction supervised
%T Supervised Dimensionality Reduction via Distance Correlation
Maximization
%U http://arxiv.org/abs/1601.00236
%X In our work, we propose a novel formulation for supervised dimensionality
reduction based on a nonlinear dependency criterion called Statistical Distance
Correlation, Székely et al. (2007). We propose an objective which is free of
distributional assumptions on regression variables and regression model
assumptions. Our proposed formulation is based on learning a low-dimensional
feature representation $z$, which maximizes the squared sum of
Distance Correlations between low dimensional features $z$ and
response $y$, and also between features $z$ and covariates
$x$. We propose a novel algorithm to optimize our proposed objective
using the Generalized Minimization Maximization method of Parizi et al.
(2015). We show superior empirical results on multiple datasets proving the
effectiveness of our proposed approach over several relevant state-of-the-art
supervised dimensionality reduction methods.
@article{sdrdcmax,
  abstract      = {In our work, we propose a novel formulation for supervised dimensionality
                   reduction based on a nonlinear dependency criterion called Statistical Distance
                   Correlation, Sz{\'e}kely et al. (2007). We propose an objective which is free of
                   distributional assumptions on regression variables and regression model
                   assumptions. Our proposed formulation is based on learning a low-dimensional
                   feature representation $\mathbf{z}$, which maximizes the squared sum of
                   Distance Correlations between low dimensional features $\mathbf{z}$ and
                   response $y$, and also between features $\mathbf{z}$ and covariates
                   $\mathbf{x}$. We propose a novel algorithm to optimize our proposed objective
                   using the Generalized Minimization Maximization method of Parizi et al.
                   (2015). We show superior empirical results on multiple datasets proving the
                   effectiveness of our proposed approach over several relevant state-of-the-art
                   supervised dimensionality reduction methods.},
  added-at      = {2016-01-05T09:03:25.000+0100},
  archiveprefix = {arXiv},
  author        = {Vepakomma, Praneeth and Tonde, Chetan and Elgammal, Ahmed},
  biburl        = {https://www.bibsonomy.org/bibtex/22ea55b1f11b6dc683dc33c88f3aa501d/cjtonde},
  description   = {Supervised Dimensionality Reduction via Distance Correlation Maximization},
  eprint        = {1601.00236},
  interhash     = {527f3f2614e9d1f7741742dc2c93ef8a},
  intrahash     = {2ea55b1f11b6dc683dc33c88f3aa501d},
  keywords      = {correlation dimensionality distance reduction supervised},
  note          = {23 pages, 6 figures},
  timestamp     = {2016-01-05T09:03:25.000+0100},
  title         = {Supervised Dimensionality Reduction via {Distance Correlation} Maximization},
  url           = {http://arxiv.org/abs/1601.00236},
  year          = {2016},
}