Since its introduction a few years ago, the concept of 'Crowdsourcing' has been heralded as highly attractive alternative approach towards evaluating the Quality of Experience (QoE) of networked multimedia services. The main reason is that, in comparison to traditional laboratory-based subjective quality testing, crowd-based QoE assessment over the Internet promises to be not only much more cost-effective (no lab facilities required, less cost per subject) but also much faster in terms of shorter campaign setup and turnaround times. However, the reliability of remote test subjects and consequently, the trustworthiness of study results is still an issue that prevents the widespread adoption of crowd-based QoE testing. Various ideas for improving user rating reliability and test efficiency have been proposed, with the majority of them relying on a posteriori analysis of results. However, such methods introduce a major lag that significantly affects efficiency of campaign execution. In this paper we address these shortcomings by introducing in momento methods for crowdsourced video QoE assessment, which yield improvements of results reliability by the factor two and campaign execution efficiency by the factor ten. The proposed in momento methods are applicable to existing crowd-based QoE testing approaches and suitable for a variety of service scenarios.
%0 Conference Paper
%1 info3-inproceedings-2014-488
%A Gardlo, Bruno
%A Egger, Sebastian
%A Seufert, Michael
%A Schatz, Raimund
%B IEEE International Conference on Communications (ICC)
%C Sydney, Australia
%D 2014
%K myown mplane
%T Crowdsourcing 2.0: Enhancing Execution Speed and Reliability of Web-based QoE Testing
%X Since its introduction a few years ago, the concept of 'Crowdsourcing' has been heralded as highly attractive alternative approach towards evaluating the Quality of Experience (QoE) of networked multimedia services. The main reason is that, in comparison to traditional laboratory-based subjective quality testing, crowd-based QoE assessment over the Internet promises to be not only much more cost-effective (no lab facilities required, less cost per subject) but also much faster in terms of shorter campaign setup and turnaround times. However, the reliability of remote test subjects and consequently, the trustworthiness of study results is still an issue that prevents the widespread adoption of crowd-based QoE testing. Various ideas for improving user rating reliability and test efficiency have been proposed, with the majority of them relying on a posteriori analysis of results. However, such methods introduce a major lag that significantly affects efficiency of campaign execution. In this paper we address these shortcomings by introducing in momento methods for crowdsourced video QoE assessment, which yield improvements of results reliability by the factor two and campaign execution efficiency by the factor ten. The proposed in momento methods are applicable to existing crowd-based QoE testing approaches and suitable for a variety of service scenarios.
@inproceedings{info3-inproceedings-2014-488,
  abstract  = {Since its introduction a few years ago, the concept of 'Crowdsourcing' has been heralded as highly attractive alternative approach towards evaluating the Quality of Experience (QoE) of networked multimedia services. The main reason is that, in comparison to traditional laboratory-based subjective quality testing, crowd-based QoE assessment over the Internet promises to be not only much more cost-effective (no lab facilities required, less cost per subject) but also much faster in terms of shorter campaign setup and turnaround times. However, the reliability of remote test subjects and consequently, the trustworthiness of study results is still an issue that prevents the widespread adoption of crowd-based QoE testing. Various ideas for improving user rating reliability and test efficiency have been proposed, with the majority of them relying on a posteriori analysis of results. However, such methods introduce a major lag that significantly affects efficiency of campaign execution. In this paper we address these shortcomings by introducing in momento methods for crowdsourced video QoE assessment, which yield improvements of results reliability by the factor two and campaign execution efficiency by the factor ten. The proposed in momento methods are applicable to existing crowd-based QoE testing approaches and suitable for a variety of service scenarios.},
  added-at  = {2016-03-10T17:38:20.000+0100},
  address   = {Sydney, Australia},
  author    = {Gardlo, Bruno and Egger, Sebastian and Seufert, Michael and Schatz, Raimund},
  biburl    = {https://www.bibsonomy.org/bibtex/238449e22d781cf24dc9b13bd9d9db87d/uniwue_info3},
  booktitle = {{IEEE} International Conference on Communications ({ICC})},
  interhash = {92c25887ab3d920453004013e9ede2fd},
  intrahash = {38449e22d781cf24dc9b13bd9d9db87d},
  keywords  = {myown mplane},
  month     = jun,
  timestamp = {2022-03-14T00:11:02.000+0100},
  title     = {Crowdsourcing 2.0: Enhancing Execution Speed and Reliability of {Web}-based {QoE} Testing},
  year      = {2014}
}