User Gesture Elicitation of Common Smartphone Tasks for Hand Proximate User Interfaces
Type:
Conference
Authors:
Shariff A. M. Faleel; Michael Gammon; Yumiko Sakamoto; Carlo Menon; Pourang Irani
Venue:
Augmented Human (AH '20)
Date of publication:
2020 May 27
Abstract:
The ubiquity of smartphone interactions along with the advancements made in mixed reality applications and gesture recognition present an intriguing space for novel interaction techniques using the hand as an interface. This paper explores the idea of using hand proximate user interfaces (UI), i.e. interactions with and display of interface elements on and around the hand. We conducted two user studies to gain a better understanding of the design space for such interactions. The first study identifies the possible ways in which various elements can be displayed on and around the hand in the context of common smartphone applications. We conduct a second study to build a gesture set for interactions with elements displayed on and around the hand. We contribute an analysis of the data and observations collected from the two studies, resulting in a layout set and a gesture set for interactions with hand proximate UIs.
Citation:
Shariff A. M. Faleel, Michael Gammon, Yumiko Sakamoto, Carlo Menon, and Pourang Irani. 2020. User gesture elicitation of common smartphone tasks for hand proximate user interfaces. In Proceedings of the 11th Augmented Human International Conference (AH '20). Association for Computing Machinery, New York, NY, USA, Article 6, 1–8. https://doi.org/10.1145/3396339.3396363
@inproceedings{shariff20_user_gesture,
author = {Faleel, Shariff A. M. and Gammon, Michael and Sakamoto, Yumiko and Menon, Carlo and Irani, Pourang},
title = {User Gesture Elicitation of Common Smartphone Tasks for Hand Proximate User Interfaces},
year = {2020},
isbn = {9781450377287},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {https://doi.org/10.1145/3396339.3396363},
doi = {10.1145/3396339.3396363},
abstract = {The ubiquity of smartphone interactions along with the advancements made in mixed reality applications and gesture recognition present an intriguing space for novel interaction techniques using the hand as an interface. This paper explores the idea of using hand proximate user interfaces (UI), i.e. interactions with and display of interface elements on and around the hand. We conducted two user studies to gain a better understanding of the design space for such interactions. The first study identifies the possible ways in which various elements can be displayed on and around the hand in the context of common smartphone applications. We conduct a second study to build a gesture set for interactions with elements displayed on and around the hand. We contribute an analysis of the data and observations collected from the two studies, resulting in a layout set and a gesture set for interactions with hand proximate UIs.},
booktitle = {Proceedings of the 11th Augmented Human International Conference},
articleno = {6},
numpages = {8},
keywords = {gestural input, one-handed interaction, elicitation study, mixed-reality interactions, user-defined gestures},
location = {Winnipeg, Manitoba, Canada},
series = {AH '20}
}