This paper introduces “Motion Passwords”, a novel biometric authentication approach where virtual reality users verify their identity by physically writing a chosen word in the air with their hand controller. This method allows combining three layers of verification: knowledge-based password input, handwriting style analysis, and motion profile recognition. As a first step towards realizing this potential, we focus on verifying users based on their motion profiles. We conducted a data collection study with 48 participants, who performed over 3800 Motion Password signatures across two sessions. We assessed the effectiveness of feature-distance and similarity-learning methods for motion-based verification using the Motion Passwords as well as specific and uniform ball-throwing signatures used in previous works. In our results, the similarity-learning model was able to verify users with the same accuracy for both signature types. This demonstrates that Motion Passwords, even when applying only the motion-based verification layer, achieve reliability comparable to previous methods. This highlights the potential for Motion Passwords to become even more reliable with the addition of knowledge-based and handwriting style verification layers. Furthermore, we present a proof-of-concept Unity application demonstrating the registration and verification process with our pretrained similarity-learning model. We publish our code, the Motion Password dataset, the pretrained model, and our Unity prototype on https://github.com/cschell/MoPs
%0 Generic
%1 rack2024motion
%A Rack, Christian
%A Schach, Lukas
%A Achter, Felix
%A Shehada, Yousof
%A Lin, Jinghuai
%A Latoschik, Marc Erich
%B Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology
%C New York, NY, USA
%D 2024
%I Association for Computing Machinery
%K hci-uwb myown schach schell xrhub
%N 19
%P 1-11
%R 10.1145/3641825.3687711
%T Motion Passwords
%U https://doi.org/10.1145/3641825.3687711
%X This paper introduces “Motion Passwords”, a novel biometric authentication approach where virtual reality users verify their identity by physically writing a chosen word in the air with their hand controller. This method allows combining three layers of verification: knowledge-based password input, handwriting style analysis, and motion profile recognition. As a first step towards realizing this potential, we focus on verifying users based on their motion profiles. We conducted a data collection study with 48 participants, who performed over 3800 Motion Password signatures across two sessions. We assessed the effectiveness of feature-distance and similarity-learning methods for motion-based verification using the Motion Passwords as well as specific and uniform ball-throwing signatures used in previous works. In our results, the similarity-learning model was able to verify users with the same accuracy for both signature types. This demonstrates that Motion Passwords, even when applying only the motion-based verification layer, achieve reliability comparable to previous methods. This highlights the potential for Motion Passwords to become even more reliable with the addition of knowledge-based and handwriting style verification layers. Furthermore, we present a proof-of-concept Unity application demonstrating the registration and verification process with our pretrained similarity-learning model. We publish our code, the Motion Password dataset, the pretrained model, and our Unity prototype on https://github.com/cschell/MoPs
%@ 9798400705359
@inproceedings{rack2024motion,
  abstract    = {This paper introduces ``Motion Passwords'', a novel biometric authentication approach where virtual reality users verify their identity by physically writing a chosen word in the air with their hand controller. This method allows combining three layers of verification: knowledge-based password input, handwriting style analysis, and motion profile recognition. As a first step towards realizing this potential, we focus on verifying users based on their motion profiles. We conducted a data collection study with 48 participants, who performed over 3800 Motion Password signatures across two sessions. We assessed the effectiveness of feature-distance and similarity-learning methods for motion-based verification using the Motion Passwords as well as specific and uniform ball-throwing signatures used in previous works. In our results, the similarity-learning model was able to verify users with the same accuracy for both signature types. This demonstrates that Motion Passwords, even when applying only the motion-based verification layer, achieve reliability comparable to previous methods. This highlights the potential for Motion Passwords to become even more reliable with the addition of knowledge-based and handwriting style verification layers. Furthermore, we present a proof-of-concept Unity application demonstrating the registration and verification process with our pretrained similarity-learning model. We publish our code, the Motion Password dataset, the pretrained model, and our Unity prototype on https://github.com/cschell/MoPs},
  added-at    = {2024-08-18T14:29:41.000+0200},
  address     = {New York, NY, USA},
  author      = {Rack, Christian and Schach, Lukas and Achter, Felix and Shehada, Yousof and Lin, Jinghuai and Latoschik, Marc Erich},
  biburl      = {https://www.bibsonomy.org/bibtex/29b111e66e94effcc2b8eb2f79fe9670b/hci-uwb},
  booktitle   = {Proceedings of the 30th {ACM} Symposium on Virtual Reality Software and Technology},
  doi         = {10.1145/3641825.3687711},
  interhash   = {5f8ac3ec892e5a12c88a39d1e6965e66},
  intrahash   = {9b111e66e94effcc2b8eb2f79fe9670b},
  isbn        = {9798400705359},
  keywords    = {hci-uwb myown schach schell xrhub},
  number      = {19},
  pages       = {1--11},
  publisher   = {Association for Computing Machinery},
  series      = {VRST '24},
  timestamp   = {2024-12-09T13:37:05.000+0100},
  title       = {Motion Passwords},
  url         = {https://doi.org/10.1145/3641825.3687711},
  venue       = {Trier, Germany},
  year        = {2024}
}