This paper presents a general coding method where data in a Hilbert space are
represented by finite dimensional coding vectors. The method is based on
empirical risk minimization within a certain class of linear operators, which
map the set of coding vectors to the Hilbert space. Two results bounding the
expected reconstruction error of the method are derived, which highlight the
role played by the codebook and the class of linear operators. The results are
specialized to some cases of practical importance, including K-means
clustering, nonnegative matrix factorization and other sparse coding methods.
%0 Generic
%1 citeulike:6644445
%A Maurer, Andreas
%A Pontil, Massimiliano
%D 2010
%K 94a29-source-coding 68t05-learning-and-adaptive-systems 62h30-classification-discrimination-cluster-analysis
%T K-Dimensional Coding Schemes in Hilbert Spaces
%U http://arxiv.org/abs/1002.0832
%X This paper presents a general coding method where data in a Hilbert space are
represented by finite dimensional coding vectors. The method is based on
empirical risk minimization within a certain class of linear operators, which
map the set of coding vectors to the Hilbert space. Two results bounding the
expected reconstruction error of the method are derived, which highlight the
role played by the codebook and the class of linear operators. The results are
specialized to some cases of practical importance, including K-means
clustering, nonnegative matrix factorization and other sparse coding methods.
@misc{citeulike:6644445,
  author                 = {Maurer, Andreas and Pontil, Massimiliano},
  title                  = {{K-Dimensional} Coding Schemes in {Hilbert} Spaces},
  year                   = 2010,
  month                  = feb,
  day                    = 3,
  eprint                 = {1002.0832},
  archiveprefix          = {arXiv},
  url                    = {http://arxiv.org/abs/1002.0832},
  abstract               = {This paper presents a general coding method where data in a Hilbert space are
represented by finite dimensional coding vectors. The method is based on
empirical risk minimization within a certain class of linear operators, which
map the set of coding vectors to the Hilbert space. Two results bounding the
expected reconstruction error of the method are derived, which highlight the
role played by the codebook and the class of linear operators. The results are
specialized to some cases of practical importance, including K-means
clustering, nonnegative matrix factorization and other sparse coding methods.},
  keywords               = {94a29-source-coding 68t05-learning-and-adaptive-systems 62h30-classification-discrimination-cluster-analysis},
  added-at               = {2017-06-29T07:13:07.000+0200},
  biburl                 = {https://www.bibsonomy.org/bibtex/274e3c0dba01e71d8488506163f1fe7c2/gdmcbain},
  citeulike-article-id   = {6644445},
  citeulike-attachment-1 = {10020832.pdf; /pdf/user/gdmcbain/article/6644445/943108/10020832.pdf; 2e233d7e819411e0a4b57ee159cee97b9dc732f9},
  citeulike-linkout-0    = {http://arxiv.org/abs/1002.0832},
  citeulike-linkout-1    = {http://arxiv.org/pdf/1002.0832},
  file                   = {10020832.pdf},
  interhash              = {4ef8fb038a74ccf11954c89d430e3883},
  intrahash              = {74e3c0dba01e71d8488506163f1fe7c2},
  posted-at              = {2014-01-23 22:56:19},
  priority               = {2},
  timestamp              = {2020-09-03T05:53:54.000+0200}
}