In this paper we present some new methods of ranking information retrieval systems without relevance judgement. The common ground of these methods is using a measure we called reference count. An extensive experimentation was conducted to evaluate the effectiveness of the proposed methods using various standard Information Retrieval evaluation measures for the ranking, like average precision, R-precision, and precision at different document levels. We also compared the effectiveness of the proposed methods with the method proposed by Soboroff et al. The experimental results showed that the proposed methods are effective, and in many cases are more effective than Soboroff et al.'s method.
Description
Methods for ranking information retrieval systems without relevance judgments
%0 Conference Paper
%1 Wu03
%A Wu, Shengli
%A Crestani, Fabio
%B SAC '03: Proceedings of the 2003 ACM symposium on Applied computing
%C New York, NY, USA
%D 2003
%I ACM
%K InformationRetrieval RelevanceFeedback ranking
%P 811--816
%R http://doi.acm.org/10.1145/952532.952693
%T Methods for ranking information retrieval systems without relevance judgments
%U http://portal.acm.org/citation.cfm?id=952532.952693
%X In this paper we present some new methods of ranking information retrieval systems without relevance judgement. The common ground of these methods is using a measure we called reference count. An extensive experimentation was conducted to evaluate the effectiveness of the proposed methods using various standard Information Retrieval evaluation measures for the ranking, like average precision, R-precision, and precision at different document levels. We also compared the effectiveness of the proposed methods with the method proposed by Soboroff et al. The experimental results showed that the proposed methods are effective, and in many cases are more effective than Soboroff et al.'s method.
%@ 1-58113-624-2
@inproceedings{Wu03,
  abstract      = {In this paper we present some new methods of ranking information retrieval systems without relevance judgement. The common ground of these methods is using a measure we called reference count. An extensive experimentation was conducted to evaluate the effectiveness of the proposed methods using various standard Information Retrieval evaluation measures for the ranking, like average precision, R-precision, and precision at different document levels. We also compared the effectiveness of the proposed methods with the method proposed by Soboroff et al. The experimental results showed that the proposed methods are effective, and in many cases are more effective than Soboroff et al.'s method.},
  added-at      = {2008-10-08T11:34:23.000+0200},
  address       = {New York, NY, USA},
  author        = {Wu, Shengli and Crestani, Fabio},
  biburl        = {https://www.bibsonomy.org/bibtex/242b172c6b6a1381437c2ad47d952d010/mkroell},
  booktitle     = {SAC '03: Proceedings of the 2003 {ACM} Symposium on Applied Computing},
  description   = {Methods for ranking information retrieval systems without relevance judgments},
  doi           = {10.1145/952532.952693},
  interhash     = {3d0de751737dbe8ad57df8422ec4c0c6},
  intrahash     = {42b172c6b6a1381437c2ad47d952d010},
  isbn          = {1-58113-624-2},
  keywords      = {InformationRetrieval RelevanceFeedback ranking},
  location      = {Melbourne, Florida},
  pages         = {811--816},
  publisher     = {ACM},
  timestamp     = {2009-03-10T11:19:23.000+0100},
  title         = {Methods for Ranking Information Retrieval Systems Without Relevance Judgments},
  url           = {http://portal.acm.org/citation.cfm?id=952532.952693},
  year          = {2003},
  internal-note = {doi normalised to bare form (was resolver URL); abstract typos fixed (et al.); title/booktitle stored in title case for style-driven recasing},
}