This article investigates the effects of different XR displays on the perception and plausibility of personalized virtual humans. We compared immersive virtual reality (VR), video see-through augmented reality (VST AR), and optical see-through augmented reality (OST AR). The personalized virtual alter egos were generated using state-of-the-art photogrammetry methods. Forty-two participants were repeatedly exposed to animated versions of their 3D-reconstructed virtual alter egos in each of the three XR display conditions, and the reconstructed alter egos were additionally modified in body weight for each repetition. We show that the display types produce different degrees of incongruence between the rendering of the virtual human and the presentation of the respective environmental background, which led to significant differences in perceived mismatches as part of our plausibility measurement. These device-related effects were further partly confirmed by subjective misestimations of the modified body weight and by the measured spatial presence: the highly incongruent OST AR condition led to the significantly highest weight misestimations as well as to the lowest perceived spatial presence. However, similar effects could not be confirmed for the affective appraisal (i.e., humanness, eeriness, or attractiveness) of the virtual humans, suggesting that these factors might be unrelated to one another.
@inproceedings{wolf2022plausibility,
author = {Wolf, Erik and Mal, David and Frohnapfel, Viktor and Döllinger, Nina and Wenninger, Stephan and Botsch, Mario and Latoschik, Marc Erich and Wienrich, Carolin},
booktitle = {2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
doi = {10.1109/ISMAR55827.2022.00065},
pages = {489--498},
title = {Plausibility and Perception of Personalized Virtual Humans between Virtual and Augmented Reality},
url = {https://downloads.hci.informatik.uni-wuerzburg.de/2022-ismar-avatar_plausibility_and_perception_display_study-preprint.pdf},
year = 2022
}