We propose a new technique for visual analytics and annotation of long-term pervasive eye tracking data for which a combined analysis of gaze and egocentric video is necessary. Our approach enables two important tasks for such data for hour-long videos from individual participants: (1) efficient annotation and (2) direct interpretation of the results. Exemplary time spans can be selected by the user and are then used as a query that initiates a fuzzy search of similar time spans based on gaze and video features. In an iterative refinement loop, the query interface then provides suggestions for the importance of individual features to improve the search results. A multi-layered timeline visualization shows an overview of annotated time spans. We demonstrate the efficiency of our approach for analyzing activities in about seven hours of video in a case study and discuss feedback on our approach from novices and experts performing the annotation task.
%0 Conference Paper
%1 10.1145/3379155.3391326
%A Kurzhals, Kuno
%A Rodrigues, Nils
%A Koch, Maurice
%A Stoll, Michael
%A Bruhn, Andres
%A Bulling, Andreas
%A Weiskopf, Daniel
%B Proceedings of the Symposium on Eye Tracking Research & Applications (ETRA)
%D 2020
%I ACM
%K 2020 A07 B01 B05 sfbtrr161 visus:bruhnas visus:bullinas visus:kurzhakn visus:rodrigns visus:weiskopf
%P 16:1-16:9
%R 10.1145/3379155.3391326
%T Visual Analytics and Annotation of Pervasive Eye Tracking Video
%U https://doi.org/10.1145/3379155.3391326
%X We propose a new technique for visual analytics and annotation of long-term pervasive eye tracking data for which a combined analysis of gaze and egocentric video is necessary. Our approach enables two important tasks for such data for hour-long videos from individual participants: (1) efficient annotation and (2) direct interpretation of the results. Exemplary time spans can be selected by the user and are then used as a query that initiates a fuzzy search of similar time spans based on gaze and video features. In an iterative refinement loop, the query interface then provides suggestions for the importance of individual features to improve the search results. A multi-layered timeline visualization shows an overview of annotated time spans. We demonstrate the efficiency of our approach for analyzing activities in about seven hours of video in a case study and discuss feedback on our approach from novices and experts performing the annotation task.
%@ 9781450371339
@inproceedings{10.1145/3379155.3391326,
  abstract  = {We propose a new technique for visual analytics and annotation of long-term pervasive eye tracking data for which a combined analysis of gaze and egocentric video is necessary. Our approach enables two important tasks for such data for hour-long videos from individual participants: (1) efficient annotation and (2) direct interpretation of the results. Exemplary time spans can be selected by the user and are then used as a query that initiates a fuzzy search of similar time spans based on gaze and video features. In an iterative refinement loop, the query interface then provides suggestions for the importance of individual features to improve the search results. A multi-layered timeline visualization shows an overview of annotated time spans. We demonstrate the efficiency of our approach for analyzing activities in about seven hours of video in a case study and discuss feedback on our approach from novices and experts performing the annotation task.},
  added-at  = {2020-06-26T13:24:50.000+0200},
  articleno = {16},
  author    = {Kurzhals, Kuno and Rodrigues, Nils and Koch, Maurice and Stoll, Michael and Bruhn, Andres and Bulling, Andreas and Weiskopf, Daniel},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/29edf01db76416efe2b79053c0977f842/leonkokkoliadis},
  booktitle = {Proceedings of the Symposium on Eye Tracking Research \& Applications (ETRA)},
  doi       = {10.1145/3379155.3391326},
  interhash = {d47d14f505f998258e45400d30ee66f3},
  intrahash = {9edf01db76416efe2b79053c0977f842},
  isbn      = {9781450371339},
  keywords  = {2020 A07 B01 B05 sfbtrr161 visus:bruhnas visus:bullinas visus:kurzhakn visus:rodrigns visus:weiskopf},
  location  = {Stuttgart, Germany},
  numpages  = {9},
  pages     = {16:1--16:9},
  publisher = {ACM},
  timestamp = {2020-06-26T11:24:50.000+0200},
  title     = {Visual Analytics and Annotation of Pervasive Eye Tracking Video},
  url       = {https://doi.org/10.1145/3379155.3391326},
  year      = {2020}
}