LLM-based embodied agents are emerging in VR, supporting scenarios such as pedagogical assistants, virtual companions, and NPCs for games. While they have the potential to enhance user interactions, they require careful design to cater to unique user needs and contexts. We present an architecture that leverages different LLM modules and their engineering integration to enable conversational interactions with an embodied agent in multi-user VR. Our system’s primary goal is to facilitate immersive tailoring through conversational input, allowing users to dynamically adjust an agent’s behavior and properties (e.g., role, personality, and appearance) directly within the virtual space, rather than during development or via separate interfaces. We demonstrate this approach with a use case and provide performance measurements in terms of tailoring latency.
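
To make the idea of conversational tailoring concrete, below is a minimal sketch of what such a step could look like. It is not the authors' implementation: the AgentProfile fields, the call_llm stub, and the JSON-update protocol are assumptions introduced purely for illustration of how a spoken or typed request might be mapped onto updates of an agent's tailorable properties.

```python
# Hypothetical sketch, not the paper's code: one possible shape for a
# conversational-tailoring step that maps a user utterance onto updates of
# an embodied agent's tailorable properties.
import json
from dataclasses import dataclass, asdict


@dataclass
class AgentProfile:
    # Assumed set of tailorable properties, taken from the abstract's examples.
    role: str = "pedagogical assistant"
    personality: str = "friendly and concise"
    appearance: str = "casual outfit"


def call_llm(prompt: str) -> str:
    # Stand-in for a real LLM request; returns a canned JSON reply so the
    # sketch runs end to end without any external service.
    return '{"personality": "playful and encouraging"}'


def tailor_agent(profile: AgentProfile, utterance: str) -> AgentProfile:
    # Ask the model to translate the user's request into a JSON object that
    # contains only the fields to change.
    prompt = (
        "You adjust properties of an embodied agent in VR.\n"
        f"Current profile: {json.dumps(asdict(profile))}\n"
        f"User request: {utterance}\n"
        "Reply with ONLY a JSON object containing the fields to change "
        "(role, personality, appearance)."
    )
    updates = json.loads(call_llm(prompt))
    # Apply only whitelisted fields so a malformed reply cannot attach
    # arbitrary attributes to the agent.
    for field in ("role", "personality", "appearance"):
        if field in updates:
            setattr(profile, field, str(updates[field]))
    return profile


if __name__ == "__main__":
    agent = tailor_agent(AgentProfile(), "Could you be a bit more playful?")
    print(agent)
```

Restricting updates to a whitelist of known fields is one way to keep a free-form model reply from writing arbitrary attributes onto the agent; the actual system may handle validation and latency measurement differently.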
@inproceedings{bellucci2025immersive,
abstract = {LLM-based embodied agents are emerging in VR, supporting scenarios such as pedagogical assistants, virtual companions, and NPCs for games. While they have the potential to enhance user interactions, they require careful design to cater to unique user needs and contexts. We present an architecture that leverages different LLM modules and their engineering integration to enable conversational interactions with an embodied agent in multi-user VR. Our system’s primary goal is to facilitate immersive tailoring through conversational input, allowing users to dynamically adjust an agent’s behavior and properties (e.g., role, personality, and appearance) directly within the virtual space, rather than during development or via separate interfaces. We demonstrate this approach with a use case and provide performance measurements in terms of tailoring latency.},
author = {Bellucci, Andrea and Jacucci, Giulio and Duong, Kien and K Das, Pritom and Smirnov, Sergei and Ahmed, Imtiaj and Lugrin, Jean-Luc},
biburl = {https://www.bibsonomy.org/bibtex/2f3728a0b1181eaf07cbf0684cacc0a04/hci-uwb},
booktitle = {Proceedings of the 32nd IEEE Conference on Virtual Reality and 3D User Interfaces},
keywords = {hci-uwb myown},
series = {IEEE VR 2025},
title = {Immersive Tailoring of Embodied Agents Using Large Language Models},
year = 2025
}