@inproceedings{ReithingerAlexanderssonEtAl03ICMI,
abstract = {The development of an intelligent user interface that supports multimodal access to multiple applications is a challenging task. In this paper we present a generic multimodal interface system where the user interacts with an anthropomorphic personalized interface agent using speech and natural gestures. The knowledge-based and uniform approach of SmartKom enables us to realize a comprehensive system that understands imprecise, ambiguous, or incomplete multimodal input and generates coordinated, cohesive, and coherent multimodal presentations for three scenarios, currently addressing more than 50 different functionalities of 14 applications. We demonstrate the main ideas in a walk through the main processing steps from modality fusion to modality fission.},
author = {Reithinger, Norbert and Alexandersson, Jan and Becker, Tilman and Blocher, Anselm and Engel, Ralf and L\"{o}ckelt, Markus and M\"{u}ller, Jochen and Pfleger, Norbert and Poller, Peter and Streit, Michael and Tschernomas, Valentin},
booktitle = {Proceedings of the 5th International Conference on Multimodal Interfaces (ICMI '03), Vancouver, Canada},
doi = {10.1145/958432.958454},
keywords = {ai, dfki, user interface, multimodal, dialog, interaction, smartkom},
pages = {101--108},
title = {{SmartKom}: Adaptive and Flexible Multimodal Access to Multiple Applications},
year = 2003
}