Multimodal interfaces are becoming more and more an emerging leading-edge area of interest in the Digital Art and Entertainment Domain. This paper aims at proposing a new approach to develop applications relying on integration of multimodal modules which can capture and collect the emotional states of the audience, process them and provide a final emotional output. A concrete framework has been designed and is being developed to provide application designers with a powerful and easy-to-use set of tools for building up multimodal affective interfaces. The proposed approach has been conceived inside the FP6 EU co-funded CALLAS (Conveying Affectiveness in Leading-edge Living Adaptive Systems) Project.
%0 Journal Article
%1 ArnoneRossiBertoncini10jmui
%A Arnone, Diego
%A Rossi, Alessandro
%A Bertoncini, Massimo
%D 2010
%J Journal on Multimodal User Interfaces
%K 01801 springer paper ai multimodal user interface interaction emotion framework entertain zzz.mmi
%N 3
%P 227--236
%R 10.1007/s12193-010-0035-3
%T An Open Source Integrated Framework for Rapid Prototyping of Multimodal Affective Applications in Digital Entertainment
%V 3
%X Multimodal interfaces are becoming more and more an emerging leading-edge area of interest in the Digital Art and Entertainment Domain. This paper aims at proposing a new approach to develop applications relying on integration of multimodal modules which can capture and collect the emotional states of the audience, process them and provide a final emotional output. A concrete framework has been designed and is being developed to provide application designers with a powerful and easy-to-use set of tools for building up multimodal affective interfaces. The proposed approach has been conceived inside the FP6 EU co-funded CALLAS (Conveying Affectiveness in Leading-edge Living Adaptive Systems) Project.
@article{ArnoneRossiBertoncini10jmui,
  abstract  = {Multimodal interfaces are becoming more and more an emerging leading-edge area of interest in the Digital Art and Entertainment Domain. This paper aims at proposing a new approach to develop applications relying on integration of multimodal modules which can capture and collect the emotional states of the audience, process them and provide a final emotional output. A concrete framework has been designed and is being developed to provide application designers with a powerful and easy-to-use set of tools for building up multimodal affective interfaces. The proposed approach has been conceived inside the {FP6} EU co-funded CALLAS (Conveying Affectiveness in Leading-edge Living Adaptive Systems) Project.},
  added-at  = {2018-02-15T15:04:18.000+0100},
  author    = {Arnone, Diego and Rossi, Alessandro and Bertoncini, Massimo},
  biburl    = {https://www.bibsonomy.org/bibtex/2f34c98743512ceff9a7faf8095badefc/flint63},
  doi       = {10.1007/s12193-010-0035-3},
  file      = {SpringerLink:2010/ArnoneRossiBertoncini10jmui.pdf:PDF},
  groups    = {public},
  interhash = {80ca1c44c3db1b3629ba4b0c50cb6f16},
  intrahash = {f34c98743512ceff9a7faf8095badefc},
  issn      = {1783-7677},
  journal   = {Journal on Multimodal User Interfaces},
  keywords  = {01801 springer paper ai multimodal user interface interaction emotion framework entertain zzz.mmi},
  month     = apr,
  number    = 3,
  pages     = {227--236},
  timestamp = {2018-04-16T11:38:00.000+0200},
  title     = {An Open Source Integrated Framework for Rapid Prototyping of Multimodal Affective Applications in Digital Entertainment},
  username  = {flint63},
  volume    = 3,
  year      = 2010
}