Hybrid user interfaces are a great opportunity to combine complementary interfaces to make use of the best interface for specific steps in a workflow. This position paper outlines one diverse application field: surgery planning. Planning a surgery is a complex task as the surgical team has to get an overview and understanding of a patient’s medical history and the internal anatomical structures of the organ or region of interest. In this position paper, we outline how different hardware (e.g., mixed reality head-worn devices and physical objects) and interaction concepts (e.g., gesture-based interaction or keyboard and mouse) can create an optimal workflow for surgery planning.
%0 Conference Paper
%1 10322244
%A Reinschluessel, Anke V.
%A Zagermann, Johannes
%B 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)
%D 2023
%K 2023 c01 c07 imported test5
%P 208-210
%R 10.1109/ISMAR-Adjunct60411.2023.00048
%T Exploring Hybrid User Interfaces for Surgery Planning
%U https://ieeexplore.ieee.org/abstract/document/10322244
%X Hybrid user interfaces are a great opportunity to combine complementary interfaces to make use of the best interface for specific steps in a workflow. This position paper outlines one diverse application field: surgery planning. Planning a surgery is a complex task as the surgical team has to get an overview and understanding of a patient’s medical history and the internal anatomical structures of the organ or region of interest. In this position paper, we outline how different hardware (e.g., mixed reality head-worn devices and physical objects) and interaction concepts (e.g., gesture-based interaction or keyboard and mouse) can create an optimal workflow for surgery planning.
@inproceedings{10322244,
  abstract  = {Hybrid user interfaces are a great opportunity to combine complementary interfaces to make use of the best interface for specific steps in a workflow. This position paper outlines one diverse application field: surgery planning. Planning a surgery is a complex task as the surgical team has to get an overview and understanding of a patient’s medical history and the internal anatomical structures of the organ or region of interest. In this position paper, we outline how different hardware (e.g., mixed reality head-worn devices and physical objects) and interaction concepts (e.g., gesture-based interaction or keyboard and mouse) can create an optimal workflow for surgery planning.},
  added-at  = {2024-03-15T15:21:08.000+0100},
  author    = {Reinschluessel, Anke V. and Zagermann, Johannes},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/264e9ed824c7c211e21a6fef26810dc3f/roberta.toscano},
  booktitle = {2023 {IEEE} International Symposium on Mixed and Augmented Reality Adjunct ({ISMAR}-Adjunct)},
  doi       = {10.1109/ISMAR-Adjunct60411.2023.00048},
  interhash = {63031d8cd76450ec409aeedbcf5b5cb4},
  intrahash = {64e9ed824c7c211e21a6fef26810dc3f},
  keywords  = {2023 c01 c07 imported test5},
  pages     = {208--210},
  timestamp = {2024-05-14T13:50:44.000+0200},
  title     = {Exploring Hybrid User Interfaces for Surgery Planning},
  url       = {https://ieeexplore.ieee.org/abstract/document/10322244},
  year      = {2023}
}