P. Ehlen, und M. Johnston. Proceedings of the 16th international conference on Intelligent user interfaces, Seite 435--436. New York, NY, USA, ACM, (2011)
DOI: 10.1145/1943403.1943486
Zusammenfassung
Speak4it is a consumer-oriented mobile search application that leverages multimodal input and output to allow users to search for and act on local business information. It supports true multimodal integration where user inputs can be distributed over multiple input modes. In addition to specifying queries by voice e.g. bike repair shops near the golden gate bridge users can combine speech and gesture, for example, gas stations + <route drawn on display> will return the gas stations along the specified route traced on the display. We describe the underlying multimodal architecture and some challenges of supporting multimodal interaction as a deployed mobile service.
%0 Conference Paper
%1 Ehlen:2011:MLS:1943403.1943486
%A Ehlen, Patrick
%A Johnston, Michael
%B Proceedings of the 16th international conference on Intelligent user interfaces
%C New York, NY, USA
%D 2011
%I ACM
%K information informationretrieval mobile retrieval search
%P 435--436
%R 10.1145/1943403.1943486
%T Multimodal local search in Speak4it
%U http://doi.acm.org/10.1145/1943403.1943486
%X Speak4it is a consumer-oriented mobile search application that leverages multimodal input and output to allow users to search for and act on local business information. It supports true multimodal integration where user inputs can be distributed over multiple input modes. In addition to specifying queries by voice e.g. bike repair shops near the golden gate bridge users can combine speech and gesture, for example, gas stations + <route drawn on display> will return the gas stations along the specified route traced on the display. We describe the underlying multimodal architecture and some challenges of supporting multimodal interaction as a deployed mobile service.
%@ 978-1-4503-0419-1
@inproceedings{Ehlen:2011:MLS:1943403.1943486,
  author    = {Ehlen, Patrick and Johnston, Michael},
  title     = {Multimodal local search in {Speak4it}},
  booktitle = {Proceedings of the 16th International Conference on Intelligent User Interfaces},
  series    = {IUI '11},
  year      = {2011},
  location  = {Palo Alto, CA, USA},
  pages     = {435--436},
  numpages  = {2},
  publisher = {ACM},
  address   = {New York, NY, USA},
  isbn      = {978-1-4503-0419-1},
  doi       = {10.1145/1943403.1943486},
  url       = {http://doi.acm.org/10.1145/1943403.1943486},
  acmid     = {1943486},
  keywords  = {information informationretrieval mobile retrieval search},
  abstract  = {Speak4it is a consumer-oriented mobile search application that leverages multimodal input and output to allow users to search for and act on local business information. It supports true multimodal integration where user inputs can be distributed over multiple input modes. In addition to specifying queries by voice e.g. bike repair shops near the golden gate bridge users can combine speech and gesture, for example, gas stations + <route drawn on display> will return the gas stations along the specified route traced on the display. We describe the underlying multimodal architecture and some challenges of supporting multimodal interaction as a deployed mobile service.},
  added-at  = {2012-07-02T15:49:24.000+0200},
  timestamp = {2012-07-02T15:49:24.000+0200},
  biburl    = {https://www.bibsonomy.org/bibtex/28bb7fab5c8e96fec12297cdea9688cd8/nadinepietras},
  interhash = {70b05476d8a0d739665ad59ba92a385b},
  intrahash = {8bb7fab5c8e96fec12297cdea9688cd8},
}