A commonly cited maxim states that 'you get what you pay for'. Implicit in it is the idea that there is a strong correlation
between the price paid for something, and its quality. In this paper, we examine
whether that applies to crowdsourcing for QoE experiments, and if so, how. As
part of a large-scale Web QoE experiment, we conducted two crowdsourced
campaigns to try and understand the influence of certain website design
parameters related to typography and color on the overall visual appeal of the
site. The second campaign was set up to pay three
times the reward of the first one. We analyze the impact of payment on a number of parameters, including ratio of reliable users and obtained MOS values, and provide recommendations for QoE modeling.
%0 Conference Paper
%1 info3-inproceedings-2013-470
%A Varela, Martin
%A Mäki, Toni
%A Skorin-Kapov, Lea
%A Hoßfeld, Tobias
%B 4th International Workshop on Perceptual Quality of Systems (PQS 2013)
%C Vienna, Austria
%D 2013
%K myown
%T Increasing Payments in Crowdsourcing: Don't look a gift horse in the mouth
%X A commonly cited maxim states that 'you get what you pay for'. Implicit in it is the idea that there is a strong correlation
between the price paid for something, and its quality. In this paper, we examine
whether that applies to crowdsourcing for QoE experiments, and if so, how. As
part of a large-scale Web QoE experiment, we conducted two crowdsourced
campaigns to try and understand the influence of certain website design
parameters related to typography and color on the overall visual appeal of the
site. The second campaign was set up to pay three
times the reward of the first one. We analyze the impact of payment on a number of parameters, including ratio of reliable users and obtained MOS values, and provide recommendations for QoE modeling.
@inproceedings{info3-inproceedings-2013-470,
abstract = {A commonly cited maxim states that 'you get what you pay for'. Implicit in it is the idea that there is a strong correlation
between the price paid for something, and its quality. In this paper, we examine
whether that applies to crowdsourcing for QoE experiments, and if so, how. As
part of a large-scale Web QoE experiment, we conducted two crowdsourced
campaigns to try and understand the influence of certain website design
parameters related to typography and color on the overall visual appeal of the
site. The second campaign was set up to pay three
times the reward of the first one. We analyze the impact of payment on a number of parameters, including ratio of reliable users and obtained MOS values, and provide recommendations for QoE modeling.
},
added-at = {2016-03-10T17:38:14.000+0100},
address = {Vienna, Austria},
author = {Varela, Martin and M{\"a}ki, Toni and Skorin-Kapov, Lea and Ho{\ss}feld, Tobias},
biburl = {https://www.bibsonomy.org/bibtex/25a7229ab1839b5b5fa8900ea3052d6e5/uniwue_info3},
booktitle = {4th International Workshop on Perceptual Quality of Systems (PQS 2013)},
interhash = {8dcd9c1add9711db89ecc1b3f3299173},
intrahash = {5a7229ab1839b5b5fa8900ea3052d6e5},
keywords = {myown},
month = sep,
timestamp = {2022-03-14T00:10:59.000+0100},
title = {Increasing Payments in Crowdsourcing: Don't look a gift horse in the mouth},
year = 2013
}