Question answering (Q&A) sites, where communities of volunteers answer questions, may provide faster, cheaper, and better services than traditional institutions. However, like other Web 2.0 platforms, user-created content raises concerns about information quality. At the same time, Q&A sites may provide answers of different quality because they have different communities and technological platforms. This paper compares answer quality on four Q&A sites: Askville, WikiAnswers, Wikipedia Reference Desk, and Yahoo! Answers. Findings indicate that: (1) similar collaborative processes on these sites result in a wide range of outcomes, and significant differences in answer accuracy, completeness, and verifiability were evident; (2) answer multiplication does not always result in better information; it yields more complete and verifiable answers but does not result in higher accuracy levels; and (3) a Q&A site’s popularity does not correlate with its answer quality, on all three measures.
%0 Journal Article
%1 Fichman01102011
%A Fichman, Pnina
%D 2011
%J Journal of Information Science
%K A answer answering assessment bisibs comparative four of on quality question sites
%N 5
%P 476-486
%R 10.1177/0165551511415584
%T A comparative assessment of answer quality on four question answering sites
%U http://jis.sagepub.com/content/37/5/476.abstract
%V 37
%X Question answering (Q&A) sites, where communities of volunteers answer questions, may provide faster, cheaper, and better services than traditional institutions. However, like other Web 2.0 platforms, user-created content raises concerns about information quality. At the same time, Q&A sites may provide answers of different quality because they have different communities and technological platforms. This paper compares answer quality on four Q&A sites: Askville, WikiAnswers, Wikipedia Reference Desk, and Yahoo! Answers. Findings indicate that: (1) similar collaborative processes on these sites result in a wide range of outcomes, and significant differences in answer accuracy, completeness, and verifiability were evident; (2) answer multiplication does not always result in better information; it yields more complete and verifiable answers but does not result in higher accuracy levels; and (3) a Q&A site’s popularity does not correlate with its answer quality, on all three measures.
@article{Fichman01102011,
  abstract  = {Question answering (Q\&A) sites, where communities of volunteers answer questions, may provide faster, cheaper, and better services than traditional institutions. However, like other Web 2.0 platforms, user-created content raises concerns about information quality. At the same time, Q\&A sites may provide answers of different quality because they have different communities and technological platforms. This paper compares answer quality on four Q\&A sites: Askville, WikiAnswers, Wikipedia Reference Desk, and Yahoo! Answers. Findings indicate that: (1) similar collaborative processes on these sites result in a wide range of outcomes, and significant differences in answer accuracy, completeness, and verifiability were evident; (2) answer multiplication does not always result in better information; it yields more complete and verifiable answers but does not result in higher accuracy levels; and (3) a Q\&A site's popularity does not correlate with its answer quality, on all three measures.},
  added-at  = {2011-12-08T14:10:49.000+0100},
  author    = {Fichman, Pnina},
  biburl    = {https://www.bibsonomy.org/bibtex/247aaf2326a085e6dec670d0e440933e5/griesbau},
  doi       = {10.1177/0165551511415584},
  eprint    = {http://jis.sagepub.com/content/37/5/476.full.pdf+html},
  interhash = {c0903d4b4d2e65f50dfe0220b12424c9},
  intrahash = {47aaf2326a085e6dec670d0e440933e5},
  journal   = {Journal of Information Science},
  keywords  = {A answer answering assessment bisibs comparative four of on quality question sites},
  number    = {5},
  pages     = {476--486},
  timestamp = {2011-12-08T14:10:49.000+0100},
  title     = {A comparative assessment of answer quality on four question answering sites},
  url       = {http://jis.sagepub.com/content/37/5/476.abstract},
  volume    = {37},
  year      = {2011},
}