@inproceedings{Mauser2014,
  author    = {Mauser, Stanislas},
  title     = {Analysis of finger- and palm-based interaction paradigms for touch-free gesture-based control of medical devices with the Leap Motion Controller},
  series    = {Human-Centered Computing : Informatik-Konferenz an der Hochschule Reutlingen, 30. April 2014. - (Informatics Inside ; 14)},
  booktitle = {Human-Centered Computing : Informatik-Konferenz an der Hochschule Reutlingen, 30. April 2014. - (Informatics Inside ; 14)},
  publisher = {Hochschule Reutlingen},
  address   = {Reutlingen},
  isbn      = {978-3-00-045427-1},
  url       = {https://nbn-resolving.org/urn:nbn:de:bsz:rt2-opus4-18506},
  pages     = {34--44},
  year      = {2014},
  abstract  = {There are several intra-operative use cases that require the surgeon to interact with medical devices. I used the Leap Motion Controller as an input device for three use cases: 2D interaction (e.g. advancing EPR data), selection of a value (e.g. room illumination brightness), and a point-and-click application scenario. I evaluated the Palm Mouse as the most suitable gesture solution for controlling the mouse cursor and recommend the implementation that uses all fingers to perform a click. This small case study introduces the implementations and methods that led to these recommendations.},
  language  = {en}
}

@incollection{MauserBurgert2014,
  author    = {Mauser, Stanislas and Burgert, Oliver},
  title     = {Touch-free, gesture-based control of medical devices and software based on the Leap Motion Controller},
  series    = {Medicine Meets Virtual Reality 21},
  booktitle = {Medicine Meets Virtual Reality 21},
  editor    = {Westwood, James and Fell{\"a}nder-Tsai, Li and Haluck, Randy and Westwood, Susan},
  publisher = {IOS Press},
  address   = {Amsterdam},
  isbn      = {978-1-61499-375-9},
  doi       = {10.3233/978-1-61499-375-9-265},
  url       = {https://nbn-resolving.org/urn:nbn:de:bsz:rt2-opus4-275},
  pages     = {265--270},
  year      = {2014},
  abstract  = {There are several intra-operative use cases that require the surgeon to interact with medical devices. We used the Leap Motion Controller as an input device and implemented two use cases: 2D interaction (e.g. advancing EPR data) and selection of a value (e.g. room illumination brightness). The gesture detection was successful, and we mapped its output to several devices and systems.},
  language  = {en}
}