G. Webb. Lecture Notes in Computer Science Vol. 2256: Proceedings of the 14th Australian Joint Conference on Artificial Intelligence (AI'01), pp. 545–556. Berlin/Heidelberg, Springer, (2001)
Abstract
Lazy Bayesian Rules modify naive Bayesian classification to undo elements of the harmful attribute independence assumption. It has been shown to provide classification error comparable to boosting decision trees. This paper explores alternatives to the candidate elimination criterion employed within Lazy Bayesian Rules. Improvements over naive Bayes are consistent so long as the candidate elimination criteria ensures there is sufficient data for accurate probability estimation. However, the original candidate elimination criterion is demonstrated to provide better overall error reduction than the use of a minimum data subset size criterion.
%0 Conference Paper
%1 Webb01b
%A Webb, G. I.
%B Lecture Notes in Computer Science Vol. 2256: Proceedings of the 14th Australian Joint Conference on Artificial Intelligence (AI'01)
%C Berlin/Heidelberg
%D 2001
%E Stumptner, M.
%E Corbett, D.
%E Brooks, M.J.
%I Springer
%K learning machine
%P 545-556
%T Candidate Elimination Criteria for Lazy Bayesian Rules
%X Lazy Bayesian Rules modify naive Bayesian classification to undo elements of the harmful attribute independence assumption. It has been shown to provide classification error comparable to boosting decision trees. This paper explores alternatives to the candidate elimination criterion employed within Lazy Bayesian Rules. Improvements over naive Bayes are consistent so long as the candidate elimination criteria ensures there is sufficient data for accurate probability estimation. However, the original candidate elimination criterion is demonstrated to provide better overall error reduction than the use of a minimum data subset size criterion.
% Webb (2001), AI'01, LNCS 2256.
% Fixes applied in review: completed the truncated title ("... Lazy Bayesian"
% -> "... Lazy Bayesian Rules" -- confirm against the LNCS 2256 listing),
% page range uses "--", LNCS series/volume split out of booktitle into the
% proper series/volume fields, proper noun/acronym braced in title/booktitle,
% editor initials spaced. location = conference venue (Adelaide), address =
% publisher city, per this file's existing convention.
@inproceedings{Webb01b,
  abstract    = {Lazy Bayesian Rules modify naive Bayesian classification to undo elements of the harmful attribute independence assumption. It has been shown to provide classification error comparable to boosting decision trees. This paper explores alternatives to the candidate elimination criterion employed within Lazy Bayesian Rules. Improvements over naive Bayes are consistent so long as the candidate elimination criteria ensures there is sufficient data for accurate probability estimation. However, the original candidate elimination criterion is demonstrated to provide better overall error reduction than the use of a minimum data subset size criterion.},
  added-at    = {2016-03-20T05:42:04.000+0100},
  address     = {Berlin/Heidelberg},
  audit-trail = {*},
  author      = {Webb, G. I.},
  biburl      = {https://www.bibsonomy.org/bibtex/29a8ff7bc660b68d320a145447e97d12f/giwebb},
  booktitle   = {Proceedings of the 14th Australian Joint Conference on Artificial Intelligence ({AI}'01)},
  editor      = {Stumptner, M. and Corbett, D. and Brooks, M. J.},
  interhash   = {7d077c8d7f40e9febf222d73f1c3e942},
  intrahash   = {9a8ff7bc660b68d320a145447e97d12f},
  keywords    = {learning machine},
  location    = {Adelaide, Australia},
  pages       = {545--556},
  publisher   = {Springer},
  series      = {Lecture Notes in Computer Science},
  timestamp   = {2016-03-20T05:42:04.000+0100},
  title       = {Candidate Elimination Criteria for Lazy {Bayesian} Rules},
  volume      = {2256},
  year        = {2001}
}