In public display contexts, interactions are spontaneous and have to work without preparation. We propose gaze as a modality for such contexts, as gaze is always at the ready, and a natural indicator of the user’s interest. We present GazeHorizon, a system that demonstrates spontaneous gaze interaction, enabling users to walk up to a display and navigate content using their eyes only. GazeHorizon is extemporaneous and optimised for instantaneous usability by any user without prior configuration, calibration or training. The system provides interactive assistance to bootstrap gaze interaction with unaware users, employs a single off-the-shelf web camera and computer vision for person-independent tracking of the horizontal gaze direction, and maps this input to rate-controlled navigation of horizontally arranged content. We have evaluated GazeHorizon through a series of field studies, culminating in a four-day deployment in a public environment during which over a hundred passers-by interacted with it, unprompted and unassisted. We realised that since eye movements are subtle, users cannot learn gaze interaction from only observing others, and as a result guidance is required.
%0 Journal Article
%1 zhang15_puc
%A Zhang, Yanxia
%A Chong, Ming Ki
%A Müller, Jörg
%A Bulling, Andreas
%A Gellersen, Hans
%D 2015
%J Personal and Ubiquitous Computing
%K Calibration-free; Deployment Eye Gaze In-the-wild Public Scrolling; displays; hcics interaction; study; tracking; vis
%N 5
%P 967-981
%R 10.1007/s00779-015-0866-8
%T Eye tracking for public displays in the wild
%V 19
%X In public display contexts, interactions are spontaneous and have to work without preparation. We propose gaze as a modality for such contexts, as gaze is always at the ready, and a natural indicator of the user’s interest. We present GazeHorizon, a system that demonstrates spontaneous gaze interaction, enabling users to walk up to a display and navigate content using their eyes only. GazeHorizon is extemporaneous and optimised for instantaneous usability by any user without prior configuration, calibration or training. The system provides interactive assistance to bootstrap gaze interaction with unaware users, employs a single off-the-shelf web camera and computer vision for person-independent tracking of the horizontal gaze direction, and maps this input to rate-controlled navigation of horizontally arranged content. We have evaluated GazeHorizon through a series of field studies, culminating in a four-day deployment in a public environment during which over a hundred passers-by interacted with it, unprompted and unassisted. We realised that since eye movements are subtle, users cannot learn gaze interaction from only observing others, and as a result guidance is required.
@article{zhang15_puc,
  author    = {Zhang, Yanxia and Chong, Ming Ki and M{\"u}ller, J{\"o}rg and Bulling, Andreas and Gellersen, Hans},
  title     = {Eye Tracking for Public Displays in the Wild},
  journal   = {Personal and Ubiquitous Computing},
  year      = {2015},
  volume    = {19},
  number    = {5},
  pages     = {967--981},
  doi       = {10.1007/s00779-015-0866-8},
  abstract  = {In public display contexts, interactions are spontaneous and have to work without preparation. We propose gaze as a modality for such contexts, as gaze is always at the ready, and a natural indicator of the user's interest. We present GazeHorizon, a system that demonstrates spontaneous gaze interaction, enabling users to walk up to a display and navigate content using their eyes only. GazeHorizon is extemporaneous and optimised for instantaneous usability by any user without prior configuration, calibration or training. The system provides interactive assistance to bootstrap gaze interaction with unaware users, employs a single off-the-shelf web camera and computer vision for person-independent tracking of the horizontal gaze direction, and maps this input to rate-controlled navigation of horizontally arranged content. We have evaluated GazeHorizon through a series of field studies, culminating in a four-day deployment in a public environment during which over a hundred passers-by interacted with it, unprompted and unassisted. We realised that since eye movements are subtle, users cannot learn gaze interaction from only observing others, and as a result guidance is required.},
  keywords  = {Calibration-free; Deployment Eye Gaze In-the-wild Public Scrolling; displays; hcics interaction; study; tracking; vis},
  added-at  = {2024-07-11T10:05:52.000+0200},
  timestamp = {2024-07-11T10:11:36.000+0200},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/28b95c5a03b0d3ad271586ee187be0721/hcics},
  interhash = {bd731ea171eb3b5630ae587e63f6dbee},
  intrahash = {8b95c5a03b0d3ad271586ee187be0721}
}