In this demonstration we present a prototype for an avatar-mediated
social interaction interface that supports the replication of head-
and eye movement in distributed virtual environments. In addition
to the retargeting of these natural behaviors, the system is capable
of augmenting the interaction based on the visual presentation of
affective states. We derive those states using neuronal data captured
by electroencephalographic (EEG) sensing in combination with a
machine learning driven classification of emotional states.
%0 Conference Paper
%1 roth2019toappearsocial
%A Roth, Daniel
%A Brübach, Larissa
%A Westermeier, Franziska
%A Schell, Christian
%A Feigl, Tobias
%A Latoschik, Marc Erich
%B Symposium on Spatial User Interaction (SUI '19), October 19--20, 2019, New Orleans, LA, USA
%D 2019
%K droth insync myown vtimes
%R 10.1145/3357251.3360018
%T A Social Interaction Interface Supporting Affective
Augmentation Based on Neuronal Data
%V SUI '19
%X In this demonstration we present a prototype for an avatar-mediated
social interaction interface that supports the replication of head-
and eye movement in distributed virtual environments. In addition
to the retargeting of these natural behaviors, the system is capable
of augmenting the interaction based on the visual presentation of
affective states. We derive those states using neuronal data captured
by electroencephalographic (EEG) sensing in combination with a
machine learning driven classification of emotional states.
%@ 978-1-4503-6975-6/19/10
@inproceedings{roth2019toappearsocial,
  abstract  = {In this demonstration we present a prototype for an avatar-mediated
social interaction interface that supports the replication of head-
and eye movement in distributed virtual environments. In addition
to the retargeting of these natural behaviors, the system is capable
of augmenting the interaction based on the visual presentation of
affective states. We derive those states using neuronal data captured
by electroencephalographic (EEG) sensing in combination with a
machine learning driven classification of emotional states.},
  added-at  = {2019-09-02T08:46:42.000+0200},
  author    = {Roth, Daniel and Br{\"u}bach, Larissa and Westermeier, Franziska and Schell, Christian and Feigl, Tobias and Latoschik, Marc Erich},
  biburl    = {https://www.bibsonomy.org/bibtex/23ab0d8cc5b5377708e36946c96e9037d/hci-uwb},
  booktitle = {Symposium on Spatial User Interaction (SUI '19), October 19--20, 2019, New Orleans, LA, USA},
  doi       = {10.1145/3357251.3360018},
  interhash = {6ca54b0e1ae713ecb4e74f0a8411ef4e},
  intrahash = {3ab0d8cc5b5377708e36946c96e9037d},
  isbn      = {978-1-4503-6975-6},
  keywords  = {droth insync myown vtimes},
  publisher = {Association for Computing Machinery},
  series    = {SUI '19},
  timestamp = {2024-05-06T17:22:37.000+0200},
  title     = {A Social Interaction Interface Supporting Affective
Augmentation Based on Neuronal Data},
  year      = {2019}
}