
{  
   "types" : {
      "Bookmark" : {
         "pluralLabel" : "Bookmarks"
      },
      "Publication" : {
         "pluralLabel" : "Publications"
      },
      "GoldStandardPublication" : {
         "pluralLabel" : "GoldStandardPublications"
      },
      "GoldStandardBookmark" : {
         "pluralLabel" : "GoldStandardBookmarks"
      },
      "Tag" : {
         "pluralLabel" : "Tags"
      },
      "User" : {
         "pluralLabel" : "Users"
      },
      "Group" : {
         "pluralLabel" : "Groups"
      },
      "Sphere" : {
         "pluralLabel" : "Spheres"
      }
   },
   
   "properties" : {
      "count" : {
         "valueType" : "number"
      },
      "date" : {
         "valueType" : "date"
      },
      "changeDate" : {
         "valueType" : "date"
      },
      "url" : {
         "valueType" : "url"
      },
      "id" : {
         "valueType" : "url"
      },
      "tags" : {
         "valueType" : "item"
      },
      "user" : {
         "valueType" : "item"
      }      
   },
   
   "items" : [
   	  
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/2aa3b2fa4a76d2adbdb12601a8e1f303d/xingyaoyu",         
         "tags" : [
            "myown","visus:riglinsn","intcdc","simtech","IntCDC","visus:sedlmaml","visus:yuxo","visus","exc2075","exc2075(from2019)"
         ],
         
         "intraHash" : "aa3b2fa4a76d2adbdb12601a8e1f303d",
         "interHash" : "24da0c7c6333c7c0e7cb91add0f381fc",
         "label" : "\"In Your Face!\": Visualizing Fitness Tracker Data in Augmented Reality",
         "user" : "xingyaoyu",
         "description" : "",
         "date" : "2023-10-04 15:49:29",
         "changeDate" : "2023-10-04 16:17:20",
         "count" : 11,
         "pub-type": "inproceedings",
         "booktitle": "Extended Abstracts of the 2023 CHI Conference on Human Factors in Computing Systems",
         "series": "CHI EA '23",
         "publisher": "Association for Computing Machinery",
         "address": "New York, NY, USA",
         "year": "2023", 
         "url": "https://doi.org/10.1145/3544549.3585912", 
         
         "author": [ 
            "Sebastian Rigling","Xingyao Yu","Michael Sedlmair"
         ],
         "authors": [
         	
            	{"first" : "Sebastian",	"last" : "Rigling"},
            	{"first" : "Xingyao",	"last" : "Yu"},
            	{"first" : "Michael",	"last" : "Sedlmair"}
         ],
         "pages": "1\u20137",
         "abstract": "The benefits of augmented reality (AR) have been demonstrated in both medicine and fitness, while its application in areas where these two fields overlap has been barely explored. We argue that AR opens up new opportunities to interact with, understand and share personal health data. To this end, we developed an app prototype that uses a Snapchat-like face filter to visualize personal health data from a fitness tracker in AR. We tested this prototype in two pilot studies and found that AR does have potential in this type of application. We suggest that AR cannot replace the current interfaces of smartwatches and mobile apps, but it can pick up where current technology falls short in creating intrinsic motivation and personal health awareness. We also provide ideas for future work in this direction.",
         "isbn" : "9781450394222",
         
         "location" : "Hamburg, Germany",
         
         "doi" : "10.1145/3544549.3585912",
         
         "bibtexKey": "Rigling2023"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/23ee24270cbfeeeda24d47639eb429e10/frankheyen",         
         "tags" : [
            "myown","intcdc","rp4","visus:heyenfk","visus:achberar","visus:sedlmaml","peerreviewed","from:frankheyen","visus"
         ],
         
         "intraHash" : "3ee24270cbfeeeda24d47639eb429e10",
         "interHash" : "e69f77a4eb5dfbd499f83cc13f62f6b8",
         "label" : "Touching data with PropellerHand",
         "user" : "frankheyen",
         "description" : "",
         "date" : "2022-08-05 10:32:29",
         "changeDate" : "2022-08-05 08:32:29",
         "count" : 3,
         "pub-type": "article",
         "journal": "Journal of Visualization",
         "year": "2022", 
         "url": "https://doi.org/10.1007/s12650-022-00859-2", 
         
         "author": [ 
            "Alexander Achberger","Frank Heyen","Kresimir Vidackovic","Michael Sedlmair"
         ],
         "authors": [
         	
            	{"first" : "Alexander",	"last" : "Achberger"},
            	{"first" : "Frank",	"last" : "Heyen"},
            	{"first" : "Kresimir",	"last" : "Vidackovic"},
            	{"first" : "Michael",	"last" : "Sedlmair"}
         ],
         "abstract": "Immersive analytics often takes place in virtual environments which promise the users immersion. To fulfill this promise, sensory feedback, such as haptics, is an important component, which is however not well supported yet. Existing haptic devices are often expensive, stationary, or occupy the user\u2019s hand, preventing them from grasping objects or using a controller. We propose PropellerHand, an ungrounded hand-mounted haptic device with two rotatable propellers, that allows exerting forces on the hand without obstructing hand use. PropellerHand is able to simulate feedback such as weight and torque by generating thrust up to 11 N in 2-DOF and a torque of 1.87 Nm in 2-DOF. Its design builds on our experience from quantitative and qualitative experiments with different form factors and parts. We evaluated our prototype through a qualitative user study in various VR scenarios that required participants to manipulate virtual objects in different ways, while changing between torques and directional forces. Results show that PropellerHand improves users\u2019 immersion in virtual reality. Additionally, we conducted a second user study in the field of immersive visualization to investigate the potential benefits of PropellerHand there.",
         "doi" : "10.1007/s12650-022-00859-2",
         
         "bibtexKey": "achberger2022touching"

      }
	  
   ]
}
