While visual saliency has recently been studied in 3D, the experimental setup for collecting 3D saliency data can be expensive and cumbersome. To address this challenge, we propose a novel experimental design that utilises an eye tracker on a screen to collect 3D saliency data, which could reduce the cost and complexity of data collection. We first collected gaze data on a computer screen and then mapped the 2D points to 3D saliency data through perspective transformation. Using this method, we propose Saliency3D, a 3D saliency dataset (49,276 fixations) comprising 10 participants looking at sixteen objects. We examined the viewing preferences for objects and our results indicate potential preferred viewing directions and a correlation between salient features and the variation in viewing directions.
%0 Conference Paper
%1 wang24_etra
%A Wang, Yao
%A Dai, Qi
%A Bâce, Mihai
%A Klein, Karsten
%A Bulling, Andreas
%B Proc. ACM International Symposium on Eye Tracking Research and Applications (ETRA)
%D 2024
%I ACM
%K visus:bullinas visus: sfbtrr161 a07 2024 a09 visus:wangyo visus
%P 1--6
%R 10.1145/3649902.3653350
%T Saliency3D: a 3D Saliency Dataset Collected on Screen
%U https://doi.org/10.1145/3649902.3653350
%X While visual saliency has recently been studied in 3D, the experimental setup for collecting 3D saliency data can be expensive and cumbersome. To address this challenge, we propose a novel experimental design that utilises an eye tracker on a screen to collect 3D saliency data, which could reduce the cost and complexity of data collection. We first collected gaze data on a computer screen and then mapped the 2D points to 3D saliency data through perspective transformation. Using this method, we propose Saliency3D, a 3D saliency dataset (49,276 fixations) comprising 10 participants looking at sixteen objects. We examined the viewing preferences for objects and our results indicate potential preferred viewing directions and a correlation between salient features and the variation in viewing directions.
@inproceedings{wang24_etra,
  abstract  = {While visual saliency has recently been studied in 3D, the experimental setup for collecting 3D saliency data can be expensive and cumbersome. To address this challenge, we propose a novel experimental design that utilises an eye tracker on a screen to collect 3D saliency data, which could reduce the cost and complexity of data collection. We first collected gaze data on a computer screen and then mapped the 2D points to 3D saliency data through perspective transformation. Using this method, we propose Saliency3D, a 3D saliency dataset (49,276 fixations) comprising 10 participants looking at sixteen objects. We examined the viewing preferences for objects and our results indicate potential preferred viewing directions and a correlation between salient features and the variation in viewing directions.},
  added-at  = {2024-04-11T09:27:41.000+0200},
  author    = {Wang, Yao and Dai, Qi and B{\^a}ce, Mihai and Klein, Karsten and Bulling, Andreas},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/253e001f6b156308b639debd56e02e979/visus},
  booktitle = {Proc. {ACM} International Symposium on Eye Tracking Research and Applications ({ETRA})},
  doi       = {10.1145/3649902.3653350},
  interhash = {2e21390d22354d8773808550478fcace},
  intrahash = {53e001f6b156308b639debd56e02e979},
  keywords  = {visus:bullinas visus: sfbtrr161 a07 2024 a09 visus:wangyo visus},
  pages     = {1--6},
  publisher = {ACM},
  timestamp = {2024-04-11T09:27:41.000+0200},
  title     = {{Saliency3D}: A {3D} Saliency Dataset Collected on Screen},
  url       = {https://doi.org/10.1145/3649902.3653350},
  year      = {2024},
}