Image thumbnails are a valuable data source for fixation filtering, scanpath classification, and visualization of eye tracking data. They are typically extracted with a constant size, approximating the foveated area. As a consequence, the focused area of interest in the scene becomes less prominent in the thumbnail with increasing distance, affecting image-based analysis techniques. In this work, we propose depth-adaptive thumbnails, a method for varying image size according to the eye-to-object distance. Adjusting the visual angle relative to the distance leads to a zoom effect on the focused area. We evaluate our approach on recordings in augmented reality, investigating the similarity of thumbnails and scanpaths. Our quantitative findings suggest that considering the eye-to-object distance improves the quality of data analysis and visualization. We demonstrate the utility of depth-adaptive thumbnails for applications in scanpath comparison and visualization.
%0 Conference Paper
%1 10.1145/3649902.3653349
%A Koch, Maurice
%A Pathmanathan, Nelusa
%A Weiskopf, Daniel
%A Kurzhals, Kuno
%B Proceedings of the 2024 Symposium on Eye Tracking Research and Applications
%C New York, NY, USA
%D 2024
%I Association for Computing Machinery
%K
%R 10.1145/3649902.3653349
%T How Deep Is Your Gaze? Leveraging Distance in Image-Based Gaze Analysis
%U https://doi.org/10.1145/3649902.3653349
%X Image thumbnails are a valuable data source for fixation filtering, scanpath classification, and visualization of eye tracking data. They are typically extracted with a constant size, approximating the foveated area. As a consequence, the focused area of interest in the scene becomes less prominent in the thumbnail with increasing distance, affecting image-based analysis techniques. In this work, we propose depth-adaptive thumbnails, a method for varying image size according to the eye-to-object distance. Adjusting the visual angle relative to the distance leads to a zoom effect on the focused area. We evaluate our approach on recordings in augmented reality, investigating the similarity of thumbnails and scanpaths. Our quantitative findings suggest that considering the eye-to-object distance improves the quality of data analysis and visualization. We demonstrate the utility of depth-adaptive thumbnails for applications in scanpath comparison and visualization.
%@ 9798400706073
@inproceedings{10.1145/3649902.3653349,
  abstract  = {Image thumbnails are a valuable data source for fixation filtering, scanpath classification, and visualization of eye tracking data. They are typically extracted with a constant size, approximating the foveated area. As a consequence, the focused area of interest in the scene becomes less prominent in the thumbnail with increasing distance, affecting image-based analysis techniques. In this work, we propose depth-adaptive thumbnails, a method for varying image size according to the eye-to-object distance. Adjusting the visual angle relative to the distance leads to a zoom effect on the focused area. We evaluate our approach on recordings in augmented reality, investigating the similarity of thumbnails and scanpaths. Our quantitative findings suggest that considering the eye-to-object distance improves the quality of data analysis and visualization. We demonstrate the utility of depth-adaptive thumbnails for applications in scanpath comparison and visualization.},
  added-at  = {2024-06-13T10:17:34.000+0200},
  address   = {New York, NY, USA},
  articleno = {13},
  author    = {Koch, Maurice and Pathmanathan, Nelusa and Weiskopf, Daniel and Kurzhals, Kuno},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/2b447e42b827a58936d30974ccab8afa1/intcdc},
  booktitle = {Proceedings of the 2024 Symposium on Eye Tracking Research and Applications},
  doi       = {10.1145/3649902.3653349},
  interhash = {11aa01260b0964129ce0c447d9df4002},
  intrahash = {b447e42b827a58936d30974ccab8afa1},
  isbn      = {9798400706073},
  location  = {Glasgow, United Kingdom},
  numpages  = {7},
  publisher = {Association for Computing Machinery},
  series    = {ETRA '24},
  timestamp = {2024-06-13T10:17:34.000+0200},
  title     = {How Deep Is Your Gaze? Leveraging Distance in Image-Based Gaze Analysis},
  year      = {2024}
}