We propose a new class of support vector algorithms for regression and classification. In these algorithms, a parameter ν lets one effectively control the number of support vectors. While this can be useful in its own right, the parameterization has the additional benefit of enabling us to eliminate one of the other free parameters of the algorithm: the accuracy parameter epsilon in the regression case, and the regularization constant C in the classification case. We describe the algorithms, give some theoretical results concerning the meaning and the choice of ν, and report experimental results.
%0 Journal Article
%1 schoelkopf2000
%A Schölkopf, Bernhard
%A Smola, Alex J.
%A Williamson, Robert C.
%A Bartlett, Peter L.
%C Cambridge, MA, USA
%D 2000
%I MIT Press
%J Neural Computation
%K learning mining svm
%N 5
%P 1207--1245
%R 10.1162/089976600300015565
%T New Support Vector Algorithms
%U http://dl.acm.org/citation.cfm?id=1139689.1139691
%V 12
%X We propose a new class of support vector algorithms for regression and classification. In these algorithms, a parameter ν lets one effectively control the number of support vectors. While this can be useful in its own right, the parameterization has the additional benefit of enabling us to eliminate one of the other free parameters of the algorithm: the accuracy parameter epsilon in the regression case, and the regularization constant C in the classification case. We describe the algorithms, give some theoretical results concerning the meaning and the choice of ν, and report experimental results.
@article{schoelkopf2000,
  abstract    = {We propose a new class of support vector algorithms for regression and classification. In these algorithms, a parameter {$\nu$} lets one effectively control the number of support vectors. While this can be useful in its own right, the parameterization has the additional benefit of enabling us to eliminate one of the other free parameters of the algorithm: the accuracy parameter epsilon in the regression case, and the regularization constant C in the classification case. We describe the algorithms, give some theoretical results concerning the meaning and the choice of {$\nu$}, and report experimental results.},
  acmid       = {1139691},
  added-at    = {2011-10-07T11:41:37.000+0200},
  address     = {Cambridge, MA, USA},
  author      = {Sch{\"o}lkopf, Bernhard and Smola, Alex J. and Williamson, Robert C. and Bartlett, Peter L.},
  biburl      = {https://www.bibsonomy.org/bibtex/202f111bd801da993bef0c401e98cfb49/utahell},
  description = {New Support Vector Algorithms},
  doi         = {10.1162/089976600300015565},
  interhash   = {6a14aefc1f58a518e8efc44cea9f56c0},
  intrahash   = {02f111bd801da993bef0c401e98cfb49},
  issn        = {0899-7667},
  journal     = {Neural Computation},
  keywords    = {learning mining svm},
  month       = may,
  number      = {5},
  numpages    = {39},
  pages       = {1207--1245},
  publisher   = {MIT Press},
  timestamp   = {2011-12-16T14:32:46.000+0100},
  title       = {New Support Vector Algorithms},
  url         = {http://dl.acm.org/citation.cfm?id=1139689.1139691},
  volume      = {12},
  year        = {2000},
}