
{  
   "types" : {
      "Bookmark" : {
         "pluralLabel" : "Bookmarks"
      },
      "Publication" : {
         "pluralLabel" : "Publications"
      },
      "GoldStandardPublication" : {
         "pluralLabel" : "GoldStandardPublications"
      },
      "GoldStandardBookmark" : {
         "pluralLabel" : "GoldStandardBookmarks"
      },
      "Tag" : {
         "pluralLabel" : "Tags"
      },
      "User" : {
         "pluralLabel" : "Users"
      },
      "Group" : {
         "pluralLabel" : "Groups"
      },
      "Sphere" : {
         "pluralLabel" : "Spheres"
      }
   },
   
   "properties" : {
      "count" : {
         "valueType" : "number"
      },
      "date" : {
         "valueType" : "date"
      },
      "changeDate" : {
         "valueType" : "date"
      },
      "url" : {
         "valueType" : "url"
      },
      "id" : {
         "valueType" : "url"
      },
      "tags" : {
         "valueType" : "item"
      },
      "user" : {
         "valueType" : "item"
      }      
   },
   
   "items" : [
   	  
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/204cd76a855fe2f2688524e0992bc8c8e/hcics",         
         "tags" : [
            "Computing","Eye","Gaze-based","Interaction,","Mobile","Tracking,","Wearable","eye","hcics","movement,","vis"
         ],
         
         "intraHash" : "04cd76a855fe2f2688524e0992bc8c8e",
         "interHash" : "6677f1640d92f97163907ddae99f72eb",
         "label" : "Pupil: An Open Source Platform for Pervasive Eye Tracking and Mobile Gaze-based Interaction",
         "user" : "hcics",
         "description" : "",
         "date" : "2024-07-11 10:05:52",
         "changeDate" : "2024-07-11 10:11:36",
         "count" : 4,
         "pub-type": "techreport",
         
         "year": "2014", 
         "url": "https://arxiv.org/abs/1405.0006", 
         
         "author": [ 
            "Moritz Kassner","William Patera","Andreas Bulling"
         ],
         "authors": [
         	
            	{"first" : "Moritz",	"last" : "Kassner"},
            	{"first" : "William",	"last" : "Patera"},
            	{"first" : "Andreas",	"last" : "Bulling"}
         ],
         "pages": "1--10","note": "arXiv:1405.0006","abstract": "Commercial head-mounted eye trackers provide useful features to customers in industry and research but are expensive and rely on closed source hardware and software. This limits the application areas and use of mobile eye tracking to expert users and inhibits user-driven development, customisation, and extension. In this paper we present Pupil -- an accessible, affordable, and extensible open source platform for mobile eye tracking and gaze-based interaction. Pupil comprises 1) a light-weight headset with high-resolution cameras, 2) an open source software framework for mobile eye tracking, as well as 3) a graphical user interface (GUI) to playback and visualize video and gaze data. Pupil features high-resolution scene and eye cameras for monocular and binocular gaze estimation. The software and GUI are platform-independent and include state-of-the-art algorithms for real-time pupil detection and tracking, calibration, and accurate gaze estimation. Results of a performance evaluation show that Pupil can provide an average gaze estimation accuracy of 0.6 degree of visual angle (0.08 degree precision) with a latency of the processing pipeline of only 0.045 seconds.",
         "bibtexKey": "kassner14_arxiv"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/2381251de13a0c13447b9f359be320f12/hcics",         
         "tags" : [
            "Computing","Machine","Pervasive","Wearable","computing,","hcics","learning,","processing,","signal","vis"
         ],
         
         "intraHash" : "381251de13a0c13447b9f359be320f12",
         "interHash" : "d53c528916deac562e62b7a2671e5612",
         "label" : "What's in the Eyes for Context-Awareness?",
         "user" : "hcics",
         "description" : "",
         "date" : "2024-07-11 10:05:52",
         "changeDate" : "2024-07-11 10:11:36",
         "count" : 1,
         "pub-type": "article",
         "journal": "IEEE Pervasive Computing",
         "year": "2011", 
         "url": "", 
         
         "author": [ 
            "Andreas Bulling","Daniel Roggen","Gerhard Tröster"
         ],
         "authors": [
         	
            	{"first" : "Andreas",	"last" : "Bulling"},
            	{"first" : "Daniel",	"last" : "Roggen"},
            	{"first" : "Gerhard",	"last" : "Tröster"}
         ],
         "volume": "10","number": "2","pages": "48-57","abstract": "Eye movements are a rich source of information about a person's context. Analyzing the link between eye movements and cognition might even allow us to develop cognition-aware pervasive computing systems that assess a person's cognitive context.",
         "doi" : "10.1109/MPRV.2010.49",
         
         "bibtexKey": "bulling11_pcm"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/28488a975de92a7205c78b3fc95ff2326/hcics",         
         "tags" : [
            "Feature","Ubiquitous","and","computing","evaluation","hcics","processing,","selection,","signal","vis"
         ],
         
         "intraHash" : "8488a975de92a7205c78b3fc95ff2326",
         "interHash" : "c57e2090594a9bf59fee711ed93db230",
         "label" : "Eye Movement Analysis for Activity Recognition Using Electrooculography",
         "user" : "hcics",
         "description" : "",
         "date" : "2024-07-11 10:05:52",
         "changeDate" : "2024-07-11 10:11:36",
         "count" : 1,
         "pub-type": "article",
         "journal": "IEEE Transactions on Pattern Analysis and Machine Intelligence (TPAMI)",
         "year": "2011", 
         "url": "", 
         
         "author": [ 
            "Andreas Bulling","Jamie A. Ward","Hans Gellersen","Gerhard Tröster"
         ],
         "authors": [
         	
            	{"first" : "Andreas",	"last" : "Bulling"},
            	{"first" : "Jamie A.",	"last" : "Ward"},
            	{"first" : "Hans",	"last" : "Gellersen"},
            	{"first" : "Gerhard",	"last" : "Tröster"}
         ],
         "volume": "33","number": "4","pages": "741-753","note": "spotlight","abstract": "In this work we investigate eye movement analysis as a new sensing modality for activity recognition. Eye movement data was recorded using an electrooculography (EOG) system. We first describe and evaluate algorithms for detecting three eye movement characteristics from EOG signals - saccades, fixations, and blinks - and propose a method for assessing repetitive patterns of eye movements. We then devise 90 different features based on these characteristics and select a subset of them using minimum redundancy maximum relevance feature selection (mRMR). We validate the method using an eight participant study in an office environment using an example set of five activity classes: copying a text, reading a printed paper, taking hand-written notes, watching a video, and browsing the web. We also include periods with no specific activity (the NULL class). Using a support vector machine (SVM) classifier and a person-independent (leave-one-out) training scheme, we obtain an average precision of 76.1% and recall of 70.5% over all classes and participants. The work demonstrates the promise of eye-based activity recognition (EAR) and opens up discussion on the wider applicability of EAR to other activities that are difficult, or even impossible, to detect using common sensing modalities.",
         "doi" : "10.1109/TPAMI.2010.86",
         
         "bibtexKey": "bulling11_pami"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/24f6c9958981e8685998dbb12a6ebfe32/hcics",         
         "tags" : [
            "(EOG),","(HCI),","Computing","Context-awareness,","Electrooculography","Eye","Gestures,","Human-Computer","Interaction","Tracking,","Wearable","hcics","vis"
         ],
         
         "intraHash" : "4f6c9958981e8685998dbb12a6ebfe32",
         "interHash" : "bb44e9ef1e678d0cb7258dde2d2abbdf",
         "label" : "It's in Your Eyes - Towards Context-Awareness and Mobile HCI Using Wearable EOG Goggles",
         "user" : "hcics",
         "description" : "",
         "date" : "2024-07-11 10:05:52",
         "changeDate" : "2024-07-11 10:11:36",
         "count" : 1,
         "pub-type": "inproceedings",
         "booktitle": "Proc. ACM International Joint Conference on Pervasive and Ubiquitous Computing (UbiComp)",
         "year": "2008", 
         "url": "", 
         
         "author": [ 
            "Andreas Bulling","Daniel Roggen","Gerhard Tröster"
         ],
         "authors": [
         	
            	{"first" : "Andreas",	"last" : "Bulling"},
            	{"first" : "Daniel",	"last" : "Roggen"},
            	{"first" : "Gerhard",	"last" : "Tröster"}
         ],
         "pages": "84-93","abstract": "In this work we describe the design, implementation and evaluation of a novel eye tracker for context-awareness and mobile HCI applications. In contrast to common systems using video cameras, this compact device relies on Electrooculography (EOG). It consists of goggles with dry electrodes integrated into the frame and a small pocket-worn component with a DSP for real-time EOG signal processing. The device is intended for wearable and standalone use: It can store data locally for long-term recordings or stream processed EOG signals to a remote device over Bluetooth. We describe how eye gestures can be efficiently recognised from EOG signals for HCI purposes. In an experiment conducted with 11 subjects playing a computer game we show that 8 eye gestures of varying complexity can be continuously recognised with equal performance to a state-of-the-art video-based system. Physical activity leads to artefacts in the EOG signal. We describe how these artefacts can be removed using an adaptive filtering scheme and characterise this approach on a 5-subject dataset. In addition to explicit eye movements for HCI, we discuss how the analysis of unconscious eye movements may eventually allow to deduce information on user activity and context not available with current sensing modalities.",
         "doi" : "10.1145/1409635.1409647",
         
         "bibtexKey": "bulling08_ubicomp"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/297bec51b23cbbef77af534015113ea29/hcics",         
         "tags" : [
            "computing","eye","gaze","hcics","interaction,","tracking,","vis","wearable"
         ],
         
         "intraHash" : "97bec51b23cbbef77af534015113ea29",
         "interHash" : "7b01a056fab11481e61c03c37f154adc",
         "label" : "Solar System: Smooth Pursuit Interactions Using EOG Glasses",
         "user" : "hcics",
         "description" : "",
         "date" : "2024-07-11 10:05:52",
         "changeDate" : "2024-07-11 10:11:36",
         "count" : 2,
         "pub-type": "inproceedings",
         "booktitle": "Adj. Proc. ACM International Joint Conference on Pervasive and Ubiquitous Computing (UbiComp)",
         "year": "2016", 
         "url": "", 
         
         "author": [ 
            "Junichi Shimizu","Juyoung Lee","Murtaza Dhuliawala","Andreas Bulling","Thad Starner","Woontack Woo","Kai Kunze"
         ],
         "authors": [
         	
            	{"first" : "Junichi",	"last" : "Shimizu"},
            	{"first" : "Juyoung",	"last" : "Lee"},
            	{"first" : "Murtaza",	"last" : "Dhuliawala"},
            	{"first" : "Andreas",	"last" : "Bulling"},
            	{"first" : "Thad",	"last" : "Starner"},
            	{"first" : "Woontack",	"last" : "Woo"},
            	{"first" : "Kai",	"last" : "Kunze"}
         ],
         "pages": "369-372","abstract": "Solar System implements smooth pursuit eye movement interactions on commercial smart glasses using electrooculography. The system requires no calibration and little to no training. We present a prototype implementation, describe initial user tests and show several application scenarios for hands-free eye gaze interactions.",
         "doi" : "10.1145/2968219.2971376",
         
         "bibtexKey": "shimizu16_ubicomp"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/26ec56d592f782c80d5a3ccde317895fc/hcics",         
         "tags" : [
            "(EOG),","(HCI),","Computing","Context-awareness,","Electrooculography","Eye","Gestures,","Human-Computer","Interaction","Tracking,","Wearable","hcics","vis"
         ],
         
         "intraHash" : "6ec56d592f782c80d5a3ccde317895fc",
         "interHash" : "e687f966a40a4b0ec3b8b18cf47cf752",
         "label" : "Wearable EOG Goggles: Eye-Based Interaction in Everyday Environments",
         "user" : "hcics",
         "description" : "",
         "date" : "2024-07-11 10:05:52",
         "changeDate" : "2024-07-11 10:11:36",
         "count" : 1,
         "pub-type": "inproceedings",
         "booktitle": "Ext. Abstr. ACM SIGCHI Conference on Human Factors in Computing Systems (CHI)",
         "year": "2009", 
         "url": "", 
         
         "author": [ 
            "Andreas Bulling","Daniel Roggen","Gerhard Tröster"
         ],
         "authors": [
         	
            	{"first" : "Andreas",	"last" : "Bulling"},
            	{"first" : "Daniel",	"last" : "Roggen"},
            	{"first" : "Gerhard",	"last" : "Tröster"}
         ],
         "pages": "3259-3264","abstract": "In this paper, we present an embedded eye tracker for context-awareness and eye-based human-computer interaction -- the wearable EOG goggles. In contrast to common systems using video, this unobtrusive device relies on Electrooculography (EOG). It consists of goggles with dry electrodes integrated into the frame and a small pocket-worn component with a powerful microcontroller for EOG signal processing. Using this lightweight system, sequences of eye movements, so-called eye gestures, can be efficiently recognised from EOG signals in real-time for HCI purposes. The device is a self-contained solution and allows for seamless eye motion sensing, context-recognition and eye-based interaction in everyday environments.",
         "doi" : "10.1145/1520340.1520468",
         
         "bibtexKey": "bulling09_chi"

      }
	  
   ]
}
