This paper presents a neural model which learns
low-dimensional nonlinear manifolds embedded in
higher-dimensional data space based on mixtures of
local linear manifolds under a self-organizing
framework. Compared to other similar networks, the
local linear manifolds learned by our network have a
more localized representation of local data
distributions thanks to a new distortion measure, which
removes confusion between sub-models that exists in
many similar mixture models. Each neuron in the network
asymptotically learns a mean vector and a principal
subspace of the data in its local region. It is proved
that there is no local extremum for each sub-model.
Experiments show that the new mixture model is better
adapted to nonlinear manifolds of various data
distributions than other similar models. The
online-learning property of this model is desirable
when the data set is very large, when computational
efficiency is of paramount importance, or when data are
sequentially input. We further show an application of
this model to recognition of handwritten digit images
based on mixtures of local linear manifolds.
%0 Journal Article
%1 zheng-nonlinear-manifolds-som-2009
%A Zheng, Huicheng
%A Shen, Wei
%A Dai, Qionghai
%A Hu, Sanqing
%A Lu, Zhe-Ming
%D 2009
%J Neurocomputing
%K som
%N 13–15
%P 3318--3330
%R 10.1016/j.neucom.2009.01.008
%T Learning nonlinear manifolds based on mixtures of
localized linear manifolds under a self-organizing
framework
%U http://www.sciencedirect.com/science/article/pii/S0925231209000605
%V 72
%X This paper presents a neural model which learns
low-dimensional nonlinear manifolds embedded in
higher-dimensional data space based on mixtures of
local linear manifolds under a self-organizing
framework. Compared to other similar networks, the
local linear manifolds learned by our network have a
more localized representation of local data
distributions thanks to a new distortion measure, which
removes confusion between sub-models that exists in
many similar mixture models. Each neuron in the network
asymptotically learns a mean vector and a principal
subspace of the data in its local region. It is proved
that there is no local extremum for each sub-model.
Experiments show that the new mixture model is better
adapted to nonlinear manifolds of various data
distributions than other similar models. The
online-learning property of this model is desirable
when the data set is very large, when computational
efficiency is of paramount importance, or when data are
sequentially input. We further show an application of
this model to recognition of handwritten digit images
based on mixtures of local linear manifolds.
@article{zheng-nonlinear-manifolds-som-2009,
  abstract  = {This paper presents a neural model which learns
               low-dimensional nonlinear manifolds embedded in
               higher-dimensional data space based on mixtures of
               local linear manifolds under a self-organizing
               framework. Compared to other similar networks, the
               local linear manifolds learned by our network have a
               more localized representation of local data
               distributions thanks to a new distortion measure, which
               removes confusion between sub-models that exists in
               many similar mixture models. Each neuron in the network
               asymptotically learns a mean vector and a principal
               subspace of the data in its local region. It is proved
               that there is no local extremum for each sub-model.
               Experiments show that the new mixture model is better
               adapted to nonlinear manifolds of various data
               distributions than other similar models. The
               online-learning property of this model is desirable
               when the data set is very large, when computational
               efficiency is of paramount importance, or when data are
               sequentially input. We further show an application of
               this model to recognition of handwritten digit images
               based on mixtures of local linear manifolds.},
  added-at  = {2016-07-12T19:24:18.000+0200},
  author    = {Zheng, Huicheng and Shen, Wei and Dai, Qionghai and Hu, Sanqing and Lu, Zhe-Ming},
  biburl    = {https://www.bibsonomy.org/bibtex/2f6aaefb4a6593bea8208dd2fabbe40e9/mhwombat},
  doi       = {10.1016/j.neucom.2009.01.008},
  interhash = {5b83b7f70af21bceb84772dde95f4834},
  intrahash = {f6aaefb4a6593bea8208dd2fabbe40e9},
  issn      = {0925-2312},
  journal   = {Neurocomputing},
  keywords  = {som},
  note      = {Hybrid Learning Machines (HAIS 2007) / Recent
               Developments in Natural Computation (ICNC 2007)},
  number    = {13--15},
  pages     = {3318--3330},
  timestamp = {2016-07-12T19:25:30.000+0200},
  title     = {Learning nonlinear manifolds based on mixtures of
               localized linear manifolds under a self-organizing
               framework},
  url       = {http://www.sciencedirect.com/science/article/pii/S0925231209000605},
  volume    = {72},
  year      = {2009}
}