Humans are inherently skilled at using subtle physiological cues from other persons, for example gaze direction in a conversation. Personal computers have yet to explore this implicit input modality. In a study with 14 participants, we investigate how a user's gaze can be leveraged in adaptive computer systems. In particular, we examine the impact of different languages on eye movements by presenting simple questions in multiple languages to our participants. We found that fixation duration is sufficient to ascertain if a user is highly proficient in a given language. We propose how these findings could be used to implement adaptive visualizations that react implicitly on the user's gaze.
%0 Conference Paper
%1 noauthororeditor2016towards
%A Karolus, Jakob
%A Woźniak, Paweł W.
%A Chuang, Lewis L.
%B Proceedings of the 9th Nordic Conference on Human-Computer Interaction (NordiCHI)
%C New York, NY, USA
%D 2016
%I ACM
%K 2016 C02 from:leonkokkoliadis sfbtrr161
%P 118:1-118:6
%R 10.1145/2971485.2996753
%T Towards Using Gaze Properties to Detect Language Proficiency
%U https://doi.org/10.1145/2971485.2996753
%X Humans are inherently skilled at using subtle physiological cues from other persons, for example gaze direction in a conversation. Personal computers have yet to explore this implicit input modality. In a study with 14 participants, we investigate how a user's gaze can be leveraged in adaptive computer systems. In particular, we examine the impact of different languages on eye movements by presenting simple questions in multiple languages to our participants. We found that fixation duration is sufficient to ascertain if a user is highly proficient in a given language. We propose how these findings could be used to implement adaptive visualizations that react implicitly on the user's gaze.
@inproceedings{noauthororeditor2016towards,
  abstract    = {Humans are inherently skilled at using subtle physiological cues from other persons, for example gaze direction in a conversation. Personal computers have yet to explore this implicit input modality. In a study with 14 participants, we investigate how a user's gaze can be leveraged in adaptive computer systems. In particular, we examine the impact of different languages on eye movements by presenting simple questions in multiple languages to our participants. We found that fixation duration is sufficient to ascertain if a user is highly proficient in a given language. We propose how these findings could be used to implement adaptive visualizations that react implicitly on the user's gaze.},
  added-at    = {2020-03-11T13:28:46.000+0100},
  address     = {New York, NY, USA},
  author      = {Karolus, Jakob and Wo{\'z}niak, Pawe{\l} W. and Chuang, Lewis L.},
  biburl      = {https://puma.ub.uni-stuttgart.de/bibtex/2a4799cd808a15dc9ad1fd3ae43744ff5/sfbtrr161},
  booktitle   = {Proceedings of the 9th Nordic Conference on Human-Computer Interaction (NordiCHI)},
  doi         = {10.1145/2971485.2996753},
  interhash   = {18b16dd557494a1864c2a90bf4a3f597},
  intrahash   = {a4799cd808a15dc9ad1fd3ae43744ff5},
  keywords    = {2016 C02 from:leonkokkoliadis sfbtrr161},
  pages       = {118:1--118:6},
  publisher   = {ACM},
  timestamp   = {2020-03-11T12:39:38.000+0100},
  title       = {Towards Using Gaze Properties to Detect Language Proficiency},
  year        = {2016},
}