Relevance evaluation is an essential part of the development and maintenance of information retrieval systems. Yet traditional evaluation approaches have several limitations; in particular, conducting new editorial evaluations of a search system can be very expensive. We describe a new approach to evaluation called TERC, based on the crowdsourcing paradigm, in which many online users, drawn from a large community, each performs a small evaluation task.
@article{alonso2008crowdsourcing,
acmid = {1480508},
address = {New York, NY, USA},
author = {Alonso, Omar and Rose, Daniel E. and Stewart, Benjamin},
doi = {10.1145/1480506.1480508},
issn = {0163-5840},
issue_date = {December 2008},
journal = {SIGIR Forum},
keywords = {crowdsourcing evaluation ir relevance alexandria},
month = nov,
number = 2,
numpages = {7},
pages = {9--15},
publisher = {ACM},
title = {Crowdsourcing for relevance evaluation},
url = {http://doi.acm.org/10.1145/1480506.1480508},
volume = 42,
year = 2008
}