@inproceedings{fe86da0cca0c4053b57831f736d5460e,
  title     = {Accessible Gesture Typing on Smartphones for People with Low Vision},
  abstract  = {While gesture typing is widely adopted on touchscreen keyboards, its support for low vision users is limited. We have designed and implemented two keyboard prototypes, layout-magnified and key-magnified keyboards, to enable gesture typing for people with low vision. Both keyboards facilitate uninterrupted access to all keys while the screen magnifier is active, allowing people with low vision to input text with one continuous stroke. Furthermore, we have created a kinematics-based decoding algorithm to accommodate the typing behavior of people with low vision. This algorithm can decode the gesture input even if the gesture trace deviates from a pre-defined word template, and the starting position of the gesture is far from the starting letter of the target word. Our user study showed that the key-magnified keyboard achieved 5.28 words per minute, 27.5\% faster than a conventional gesture typing keyboard with voice feedback.},
  keywords  = {accessibility, gesture input, low vision, smartphone keyboard, text input, word gesture},
  author    = {Zhang, Dan and Li, Zhi and Ashok, Vikas and Seiple, William H. and Ramakrishnan, I. V. and Bi, Xiaojun},
  note      = {Publisher Copyright: {\textcopyright} 2024 ACM.; 37th Annual ACM Symposium on User Interface Software and Technology, UIST 2024 ; Conference date: 13-10-2024 Through 16-10-2024},
  year      = {2024},
  month     = oct,
  day       = {13},
  doi       = {10.1145/3654777.3676447},
  language  = {English},
  series    = {{UIST} 2024 - Proceedings of the 37th Annual {ACM} Symposium on User Interface Software and Technology},
  publisher = {Association for Computing Machinery, Inc},
  booktitle = {{UIST} 2024 - Proceedings of the 37th Annual {ACM} Symposium on User Interface Software and Technology},
}