Eye tracking has a long history in medical and psychological research as a tool for recording and studying human visual behavior. Real-time gaze-based text entry can also be a powerful means of communication and control for people with physical disabilities. Following recent technological advances and the advent of affordable eye trackers, there is a growing interest in pervasive attention-aware systems and interfaces that have the potential to revolutionize mainstream human-technology interaction. In this chapter, we provide an introduction to the state-of-the-art in eye tracking technology and gaze estimation. We discuss challenges involved in using a perceptual organ, the eye, as an input modality. Examples of real-life applications are reviewed, together with design solutions derived from research results. We also discuss how to match the user requirements and key features of different eye tracking systems to find the best system for each task and application.
%0 Book Section
%1 MajarantaBulling14c3
%A Majaranta, Päivi
%A Bulling, Andreas
%B Advances in Physiological Computing
%C London
%D 2014
%E Fairclough, Stephen H.
%E Gilleade, Kiel
%I Springer
%K 01614 springer paper ai multimodal interface user interaction image analysis zzz.mmi
%P 39--65
%R 10.1007/978-1-4471-6392-3_3
%T Eye Tracking and Eye-Based Human-Computer Interaction
%X Eye tracking has a long history in medical and psychological research as a tool for recording and studying human visual behavior. Real-time gaze-based text entry can also be a powerful means of communication and control for people with physical disabilities. Following recent technological advances and the advent of affordable eye trackers, there is a growing interest in pervasive attention-aware systems and interfaces that have the potential to revolutionize mainstream human-technology interaction. In this chapter, we provide an introduction to the state-of-the-art in eye tracking technology and gaze estimation. We discuss challenges involved in using a perceptual organ, the eye, as an input modality. Examples of real-life applications are reviewed, together with design solutions derived from research results. We also discuss how to match the user requirements and key features of different eye tracking systems to find the best system for each task and application.
%& 3
%@ 978-1-4471-6391-6
@incollection{MajarantaBulling14c3,
  abstract  = {Eye tracking has a long history in medical and psychological research as a tool for recording and studying human visual behavior. Real-time gaze-based text entry can also be a powerful means of communication and control for people with physical disabilities. Following recent technological advances and the advent of affordable eye trackers, there is a growing interest in pervasive attention-aware systems and interfaces that have the potential to revolutionize mainstream human-technology interaction. In this chapter, we provide an introduction to the state-of-the-art in eye tracking technology and gaze estimation. We discuss challenges involved in using a perceptual organ, the eye, as an input modality. Examples of real-life applications are reviewed, together with design solutions derived from research results. We also discuss how to match the user requirements and key features of different eye tracking systems to find the best system for each task and application.},
  added-at  = {2016-09-26T15:55:53.000+0200},
  address   = {London},
  author    = {Majaranta, P{\"a}ivi and Bulling, Andreas},
  biburl    = {https://www.bibsonomy.org/bibtex/27424d47a5861456a0df917062f38bc2c/flint63},
  booktitle = {Advances in Physiological Computing},
  chapter   = {3},
  crossref  = {FaircloughGilleade2014},
  doi       = {10.1007/978-1-4471-6392-3_3},
  editor    = {Fairclough, Stephen H. and Gilleade, Kiel},
  file      = {SpringerLink:2014/MajarantaBulling14c3.pdf:PDF},
  groups    = {public},
  interhash = {47f71a48065619dc1c1c58538d67c2e4},
  intrahash = {7424d47a5861456a0df917062f38bc2c},
  isbn      = {978-1-4471-6391-6},
  issn      = {1571-5035},
  keywords  = {01614 springer paper ai multimodal interface user interaction image analysis zzz.mmi},
  pages     = {39--65},
  publisher = {Springer},
  series    = {Human-Computer Interaction Series},
  timestamp = {2018-04-16T12:04:51.000+0200},
  title     = {Eye Tracking and Eye-Based Human-Computer Interaction},
  username  = {flint63},
  year      = {2014},
}