This paper addresses the problem of improving the accuracy of an hypothesis output by a learning algorithm in the distribution-free (PAC) learning model. A concept class is learnable (or strongly learnable) if, given access to a source of examples of the unknown concept, the learner with high probability is able to output an hypothesis that is correct on all but an arbitrarily small fraction of the instances. The concept class is weakly learnable if the learner can produce an hypothesis that performs only slightly better than random guessing. In this paper, it is shown that these two notions of learnability are equivalent.
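The two notions in the abstract can be written out formally; the following is a minimal sketch in standard PAC notation (the symbols $\mathcal{C}$, $D$, $c$, $h$, $\epsilon$, $\delta$, and $\gamma$ are conventional choices, not notation fixed by the abstract itself):

% Formal sketch of strong vs. weak PAC learnability (standard notation,
% assumed rather than quoted from the paper's abstract).
A concept class $\mathcal{C}$ is \emph{strongly learnable} if there is an
algorithm that, for every target concept $c \in \mathcal{C}$, every
distribution $D$ on the instance space, and all $\epsilon, \delta \in (0,1)$,
outputs with probability at least $1 - \delta$ a hypothesis $h$ satisfying
\[
  \Pr_{x \sim D}\bigl[\, h(x) \neq c(x) \,\bigr] \le \epsilon .
\]
It is \emph{weakly learnable} if the error bound need only hold for some
fixed $\gamma > 0$:
\[
  \Pr_{x \sim D}\bigl[\, h(x) \neq c(x) \,\bigr] \le \frac{1}{2} - \gamma ,
\]
i.e.\ the hypothesis beats random guessing by a fixed margin. The paper's
result is that these two definitions are equivalent.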
@article{schapire_strength_1990,
abstract = {This paper addresses the problem of improving the accuracy of an hypothesis output by a learning algorithm in the distribution-free (PAC) learning model. A concept class is learnable (or strongly learnable) if, given access to a source of examples of the unknown concept, the learner with high probability is able to output an hypothesis that is correct on all but an arbitrarily small fraction of the instances. The concept class is weakly learnable if the learner can produce an hypothesis that performs only slightly better than random guessing. In this paper, it is shown that these two notions of learnability are equivalent.},
author = {Schapire, Robert E.},
doi = {10.1023/A:1022648800760},
issn = {0885-6125},
journal = {Machine Learning},
keywords = {Boosting, Machine Learning},
number = 2,
pages = {197--227},
title = {The strength of weak learnability},
url = {http://www.cs.princeton.edu/~schapire/papers/strengthofweak.pdf},
urldate = {2012-09-24},
volume = 5,
year = 1990
}