Technical progress in hardware and software enables us to record gaze data in everyday situations and over long time spans. Among a multitude of research opportunities, this technology enables visualization researchers to catch a glimpse behind performance measures and into the perceptual and cognitive processes of people using visualization techniques. The majority of eye tracking studies performed for visualization research is limited to the analysis of gaze distributions and aggregated statistics, thus only covering a small portion of insights that can be derived from gaze data. We argue that incorporating theories and methodology from psychology and cognitive science will benefit the design and evaluation of eye tracking experiments for visualization. This position paper outlines our experiences with eye tracking in visualization and states the benefits that an interdisciplinary research field on visualization psychology might bring for better understanding how people interpret visualizations.
%0 Journal Article
%1 DBLP:journals/corr/abs-2009-14515
%A Kurzhals, Kuno
%A Burch, Michael
%A Weiskopf, Daniel
%D 2020
%J CoRR
%K 2020 b01 sfbtrr161 visus visus:weiskopf
%T What We See and What We Get from Visualization: Eye Tracking Beyond Gaze Distributions and Scanpaths
%U https://arxiv.org/abs/2009.14515
%V abs/2009.14515
%X Technical progress in hardware and software enables us to record gaze data in everyday situations and over long time spans. Among a multitude of research opportunities, this technology enables visualization researchers to catch a glimpse behind performance measures and into the perceptual and cognitive processes of people using visualization techniques. The majority of eye tracking studies performed for visualization research is limited to the analysis of gaze distributions and aggregated statistics, thus only covering a small portion of insights that can be derived from gaze data. We argue that incorporating theories and methodology from psychology and cognitive science will benefit the design and evaluation of eye tracking experiments for visualization. This position paper outlines our experiences with eye tracking in visualization and states the benefits that an interdisciplinary research field on visualization psychology might bring for better understanding how people interpret visualizations.
% arXiv preprint (DBLP export). DOI added per arXiv's 10.48550/arXiv.<id> registration scheme.
% Trailing fields (added-at .. intrahash) are BibSonomy/PUMA bookkeeping; ignored by BibTeX styles.
@article{DBLP:journals/corr/abs-2009-14515,
  author        = {Kurzhals, Kuno and Burch, Michael and Weiskopf, Daniel},
  title         = {What We See and What We Get from Visualization: Eye Tracking Beyond Gaze Distributions and Scanpaths},
  journal       = {CoRR},
  volume        = {abs/2009.14515},
  year          = {2020},
  doi           = {10.48550/arXiv.2009.14515},
  url           = {https://arxiv.org/abs/2009.14515},
  eprint        = {2009.14515},
  archiveprefix = {arXiv},
  abstract      = {Technical progress in hardware and software enables us to record gaze data in everyday situations and over long time spans. Among a multitude of research opportunities, this technology enables visualization researchers to catch a glimpse behind performance measures and into the perceptual and cognitive processes of people using visualization techniques. The majority of eye tracking studies performed for visualization research is limited to the analysis of gaze distributions and aggregated statistics, thus only covering a small portion of insights that can be derived from gaze data. We argue that incorporating theories and methodology from psychology and cognitive science will benefit the design and evaluation of eye tracking experiments for visualization. This position paper outlines our experiences with eye tracking in visualization and states the benefits that an interdisciplinary research field on visualization psychology might bring for better understanding how people interpret visualizations.},
  keywords      = {2020 b01 sfbtrr161 visus visus:weiskopf},
  added-at      = {2021-06-16T11:23:43.000+0200},
  timestamp     = {2021-06-16T09:54:52.000+0200},
  bibsource     = {dblp computer science bibliography, https://dblp.org},
  biburl        = {https://puma.ub.uni-stuttgart.de/bibtex/28e0c7d7865d651dc2f73779a2e321500/christinawarren},
  interhash     = {20abe8302229f118e5dd5afa0f9c11fd},
  intrahash     = {8e0c7d7865d651dc2f73779a2e321500},
}