This paper addresses the challenge of assessing and
modeling Quality of Experience (QoE) for online video services
that are based on TCP-streaming. We present a dedicated QoE
model for YouTube that takes into account the key influence
factors (such as stalling events caused by network bottlenecks)
that shape quality perception of this service. As a second contribution,
we propose a generic subjective QoE assessment
methodology for multimedia applications (like online video) that
is based on crowdsourcing - a highly cost-efficient, fast and
flexible way of conducting user experiments. We demonstrate
how our approach successfully leverages the inherent strengths
of crowdsourcing while addressing critical aspects such as the
reliability of the experimental data obtained. Our results suggest
that crowdsourcing is a highly effective QoE assessment method
not only for online video, but also for a wide range of other
current and future Internet applications.
%0 Conference Paper
%1 info3-inproceedings-2011-426
%A Hoßfeld, Tobias
%A Schatz, Raimund
%A Seufert, Michael
%A Hirth, Matthias
%A Zinner, Thomas
%A Tran-Gia, Phuoc
%B IEEE International Workshop on Multimedia Quality of Experience - Modeling, Evaluation, and Directions (MQoE)
%C Dana Point, CA, USA
%D 2011
%K myown demobenefit
%T Quantification of YouTube QoE via Crowdsourcing
%X This paper addresses the challenge of assessing and
modeling Quality of Experience (QoE) for online video services
that are based on TCP-streaming. We present a dedicated QoE
model for YouTube that takes into account the key influence
factors (such as stalling events caused by network bottlenecks)
that shape quality perception of this service. As a second contribution,
we propose a generic subjective QoE assessment
methodology for multimedia applications (like online video) that
is based on crowdsourcing - a highly cost-efficient, fast and
flexible way of conducting user experiments. We demonstrate
how our approach successfully leverages the inherent strengths
of crowdsourcing while addressing critical aspects such as the
reliability of the experimental data obtained. Our results suggest
that crowdsourcing is a highly effective QoE assessment method
not only for online video, but also for a wide range of other
current and future Internet applications.
@inproceedings{info3-inproceedings-2011-426,
  abstract  = {This paper addresses the challenge of assessing and
modeling Quality of Experience (QoE) for online video services
that are based on TCP-streaming. We present a dedicated QoE
model for YouTube that takes into account the key influence
factors (such as stalling events caused by network bottlenecks)
that shape quality perception of this service. As a second contribution,
we propose a generic subjective QoE assessment
methodology for multimedia applications (like online video) that
is based on crowdsourcing - a highly cost-efficient, fast and
flexible way of conducting user experiments. We demonstrate
how our approach successfully leverages the inherent strengths
of crowdsourcing while addressing critical aspects such as the
reliability of the experimental data obtained. Our results suggest
that crowdsourcing is a highly effective QoE assessment method
not only for online video, but also for a wide range of other
current and future Internet applications.},
  added-at  = {2016-03-10T17:37:54.000+0100},
  address   = {Dana Point, CA, USA},
  author    = {Ho{\ss}feld, Tobias and Schatz, Raimund and Seufert, Michael and Hirth, Matthias and Zinner, Thomas and Tran-Gia, Phuoc},
  biburl    = {https://www.bibsonomy.org/bibtex/2ad6c2207fe7d7430a258838742ce31cc/uniwue_info3},
  booktitle = {{IEEE} International Workshop on Multimedia Quality of Experience - Modeling, Evaluation, and Directions ({MQoE})},
  interhash = {fde4210e8e52951a75d9c92785a4c3e4},
  intrahash = {ad6c2207fe7d7430a258838742ce31cc},
  keywords  = {myown demobenefit},
  month     = dec,
  timestamp = {2022-03-14T00:10:49.000+0100},
  title     = {Quantification of {YouTube} {QoE} via Crowdsourcing},
  year      = {2011}
}