In this work we analyse the eye movements of people in transit in an everyday environment using a wearable electrooculographic (EOG) system. We compare three approaches for continuous recognition of reading activity: a string matching algorithm that exploits typical characteristics of reading signals, such as saccades and fixations; and two variants of Hidden Markov Models (HMMs), mixed Gaussian and discrete. The recognition algorithms are evaluated in an experiment performed with eight subjects reading freely chosen text without pictures while sitting at a desk, standing, walking indoors and outdoors, and riding a tram. A total dataset of roughly 6 hours was collected, with reading activity accounting for about half of the time. We were able to detect reading activity across all subjects with a top recognition rate of 80.2% (71.0% recall, 11.6% false positives) using string matching. We show that EOG is a potentially robust technique for reading recognition across a number of typical daily situations.
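The abstract only names the string matching approach; the short Python sketch below is an illustrative assumption of how such a detector might work, not the paper's actual algorithm. Detected horizontal saccades are encoded as symbols ('r' for a small rightward reading saccade, 'L' for a large leftward newline sweep) and a pattern of several 'r's followed by an 'L' is matched against the stream. The function names, amplitude thresholds, and symbol alphabet are invented for this example.

import re

# Illustrative sketch only (assumed encoding, not the paper's algorithm):
# each detected horizontal saccade is mapped to a symbol, and a regular
# expression flags stretches that look like reading, i.e. several small
# rightward saccades followed by a large leftward "newline" sweep.

def encode_saccades(saccades, sweep_threshold_deg=5.0):
    """Map (direction, amplitude in degrees) saccade events to a symbol string."""
    symbols = []
    for direction, amplitude in saccades:
        if direction == "right" and amplitude < sweep_threshold_deg:
            symbols.append("r")   # small forward saccade while reading a line
        elif direction == "left" and amplitude >= sweep_threshold_deg:
            symbols.append("L")   # large backward sweep to the start of the next line
        else:
            symbols.append("x")   # anything else (non-reading eye movement)
    return "".join(symbols)

def detect_reading(symbol_string, min_saccades_per_line=3):
    """Return (start, end) spans of the symbol string whose pattern suggests reading."""
    pattern = re.compile(r"(?:r{%d,}L)+" % min_saccades_per_line)
    return [(m.start(), m.end()) for m in pattern.finditer(symbol_string)]

# Example: two read lines followed by unrelated eye movements.
events = ([("right", 2.1)] * 4 + [("left", 12.0)]
          + [("right", 1.8)] * 5 + [("left", 11.0)]
          + [("up", 3.0), ("down", 4.0)])
print(detect_reading(encode_saccades(events)))   # -> [(0, 11)]

A real implementation would operate on saccades detected from the denoised horizontal EOG channel and tune the thresholds per subject; the values here are placeholders chosen only so the example runs.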
%0 Conference Paper
%1 bulling08_pervasive
%A Bulling, Andreas
%A Ward, Jamie A.
%A Gellersen, Hans
%A Tröster, Gerhard
%B Proc. International Conference on Pervasive Computing (Pervasive)
%D 2008
%K Activity Recognition, Electrooculography (EOG), Reading, Transit, wearable, hcics, vis
%P 19-37
%R 10.1007/978-3-540-79576-6_2
%T Robust Recognition of Reading Activity in Transit Using Wearable Electrooculography
%X In this work we analyse the eye movements of people in transit in an everyday environment using a wearable electrooculographic (EOG) system. We compare three approaches for continuous recognition of reading activity: a string matching algorithm that exploits typical characteristics of reading signals, such as saccades and fixations; and two variants of Hidden Markov Models (HMMs), mixed Gaussian and discrete. The recognition algorithms are evaluated in an experiment performed with eight subjects reading freely chosen text without pictures while sitting at a desk, standing, walking indoors and outdoors, and riding a tram. A total dataset of roughly 6 hours was collected, with reading activity accounting for about half of the time. We were able to detect reading activity across all subjects with a top recognition rate of 80.2% (71.0% recall, 11.6% false positives) using string matching. We show that EOG is a potentially robust technique for reading recognition across a number of typical daily situations.
@inproceedings{bulling08_pervasive,
abstract = {In this work we analyse the eye movements of people in transit in an everyday environment using a wearable electrooculographic (EOG) system. We compare three approaches for continuous recognition of reading activity: a string matching algorithm that exploits typical characteristics of reading signals, such as saccades and fixations; and two variants of Hidden Markov Models (HMMs), mixed Gaussian and discrete. The recognition algorithms are evaluated in an experiment performed with eight subjects reading freely chosen text without pictures while sitting at a desk, standing, walking indoors and outdoors, and riding a tram. A total dataset of roughly 6 hours was collected, with reading activity accounting for about half of the time. We were able to detect reading activity across all subjects with a top recognition rate of 80.2% (71.0% recall, 11.6% false positives) using string matching. We show that EOG is a potentially robust technique for reading recognition across a number of typical daily situations.},
added-at = {2024-07-11T10:05:52.000+0200},
author = {Bulling, Andreas and Ward, Jamie A. and Gellersen, Hans and Tr{\"{o}}ster, Gerhard},
biburl = {https://puma.ub.uni-stuttgart.de/bibtex/2296de80c716061a29203467c3df3afd1/hcics},
booktitle = {Proc. International Conference on Pervasive Computing (Pervasive)},
doi = {10.1007/978-3-540-79576-6_2},
interhash = {d96a55981658e52db83e205d74b211db},
intrahash = {296de80c716061a29203467c3df3afd1},
keywords = {Activity Recognition, Electrooculography (EOG), Reading, Transit, wearable, hcics, vis},
pages = {19--37},
timestamp = {2024-07-11T10:11:36.000+0200},
title = {Robust {R}ecognition of {R}eading {A}ctivity in {T}ransit {U}sing {W}earable {E}lectrooculography},
year = 2008
}