The global prevalence of mental health disorders has created a substantial treatment gap. To support clinicians and increase access to care, researchers in the field of Artificial Intelligence (AI) and Virtual Reality (VR) have investigated technology-mediated psychotherapy for years.
However, research about stakeholders' concerns and their readiness to use AI in psychotherapy remains scarce. This study focuses on a user-centered approach to accommodate patients' concerns and, based on the results, implement measures to foster self-disclosure and trust towards an embodied AI therapist in VR.
First, we conducted an online study with mental health patients ($N = 152$), which identified data autonomy and transparency as their primary ethical concerns. In a subsequent in-person VR study ($N = 90$) we compared effects of increased data autonomy and transparency on self-disclosure and trust towards an embodied AI therapist.
Results indicated that higher data autonomy led to greater self-disclosure, while transparency had no significant effect. Manipulating data autonomy and transparency did not affect perceived trust, though exploratory calculations revealed that women reported significantly higher trust levels than men. These findings illuminate patients' priorities and provide implications for technical designs for AI-driven mental health care.
%0 Journal Article
%1 obremski2026healed
%A Obremski, David
%A Friedrich, Paula
%A Wienrich, Carolin
%D 2026
%J IEEE Transactions on Visualization and Computer Graphics
%K c.wienrich ieeevr26 myown obremski piis xrhub
%T To be Healed or Hacked? - User-Centered Ethical Design for Embodied AI in Mental Health Care
%X The global prevalence of mental health disorders has created a substantial treatment gap. To support clinicians and increase access to care, researchers in the field of Artificial Intelligence (AI) and Virtual Reality (VR) have investigated technology-mediated psychotherapy for years.
However, research about stakeholders' concerns and their readiness to use AI in psychotherapy remains scarce. This study focuses on a user-centered approach to accommodate patients' concerns and, based on the results, implement measures to foster self-disclosure and trust towards an embodied AI therapist in VR.
First, we conducted an online study with mental health patients ($N = 152$), which identified data autonomy and transparency as their primary ethical concerns. In a subsequent in-person VR study ($N = 90$) we compared effects of increased data autonomy and transparency on self-disclosure and trust towards an embodied AI therapist.
Results indicated that higher data autonomy led to greater self-disclosure, while transparency had no significant effect. Manipulating data autonomy and transparency did not affect perceived trust, though exploratory calculations revealed that women reported significantly higher trust levels than men. These findings illuminate patients' priorities and provide implications for technical designs for AI-driven mental health care.
% NOTE(review): fixed U+2011 non-breaking hyphen in title (non-ASCII breaks
% classic 8-bit BibTeX), braced {AI} so sentence-casing styles keep the acronym,
% and used "--" for the subtitle separator. "To be published" stays in note,
% per convention (year holds only the four-digit year).
@article{obremski2026healed,
  abstract  = {The global prevalence of mental health disorders has created a substantial treatment gap. To support clinicians and increase access to care, researchers in the field of Artificial Intelligence (AI) and Virtual Reality (VR) have investigated technology-mediated psychotherapy for years.
However, research about stakeholders' concerns and their readiness to use AI in psychotherapy remains scarce. This study focuses on a user-centered approach to accommodate patients' concerns and, based on the results, implement measures to foster self-disclosure and trust towards an embodied AI therapist in VR.
First, we conducted an online study with mental health patients ($N = 152$), which identified data autonomy and transparency as their primary ethical concerns. In a subsequent in-person VR study ($N = 90$) we compared effects of increased data autonomy and transparency on self-disclosure and trust towards an embodied AI therapist.
Results indicated that higher data autonomy led to greater self-disclosure, while transparency had no significant effect. Manipulating data autonomy and transparency did not affect perceived trust, though exploratory calculations revealed that women reported significantly higher trust levels than men. These findings illuminate patients' priorities and provide implications for technical designs for AI-driven mental health care.},
  added-at  = {2026-02-09T10:06:15.000+0100},
  author    = {Obremski, David and Friedrich, Paula and Wienrich, Carolin},
  biburl    = {https://www.bibsonomy.org/bibtex/27f5f262359ae6490ab691d2c0039dbd6/hci-uwb},
  interhash = {2e0630b012f9f1c481f9d623707ae948},
  intrahash = {7f5f262359ae6490ab691d2c0039dbd6},
  journal   = {IEEE Transactions on Visualization and Computer Graphics},
  keywords  = {c.wienrich ieeevr26 myown obremski piis xrhub},
  note      = {To be published},
  timestamp = {2026-03-09T13:56:21.000+0100},
  title     = {To be Healed or Hacked? -- User-Centered Ethical Design for Embodied {AI} in Mental Health Care},
  year      = {2026}
}