AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTex record for each paper.
2025
Lv, J.; Slowik, A.; Rani, S.; Kim, B.-G.; Chen, C.-M.; Kumari, S.; Li, K.; Lyu, X.; Jiang, H.
In: Research, vol. 8, 2025, ISSN: 2096-5168.
Abstract | Links | BibTeX | Tags: Adaptive fusion, Collaborative representations, Diagnosis, Electronic health record, Generative adversarial networks, Health care application, Healthcare environments, Immersive, Learning frameworks, Metaverses, Multi-modal, Multi-modal learning, Performance
@article{lv_multimodal_2025,
  title     = {Multimodal Metaverse Healthcare: A Collaborative Representation and Adaptive Fusion Approach for Generative Artificial-Intelligence-Driven Diagnosis},
  author    = {Lv, J. and Slowik, A. and Rani, S. and Kim, B.-G. and Chen, C.-M. and Kumari, S. and Li, K. and Lyu, X. and Jiang, H.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-86000613924&doi=10.34133%2fresearch.0616&partnerID=40&md5=fdc8ae3b29db905105dada9a5657b54b},
  doi       = {10.34133/research.0616},
  issn      = {2096-5168},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Research},
  volume    = {8},
  abstract  = {The metaverse enables immersive virtual healthcare environments, presenting opportunities for enhanced care delivery. A key challenge lies in effectively combining multimodal healthcare data and generative artificial intelligence abilities within metaverse-based healthcare applications, which is a problem that needs to be addressed. This paper proposes a novel multimodal learning framework for metaverse healthcare, MMLMH, based on collaborative intra- and intersample representation and adaptive fusion. Our framework introduces a collaborative representation learning approach that captures shared and modality-specific features across text, audio, and visual health data. By combining modality-specific and shared encoders with carefully formulated intrasample and intersample collaboration mechanisms, MMLMH achieves superior feature representation for complex health assessments. The framework’s adaptive fusion approach, utilizing attention mechanisms and gated neural networks, demonstrates robust performance across varying noise levels and data quality conditions. Experiments on metaverse healthcare datasets demonstrate MMLMH’s superior performance over baseline methods across multiple evaluation metrics. Longitudinal studies and visualization further illustrate MMLMH’s adaptability to evolving virtual environments and balanced performance across diagnostic accuracy, patient–system interaction efficacy, and data integration complexity. The proposed framework has a unique advantage in that a similar level of performance is maintained across various patient populations and virtual avatars, which could lead to greater personalization of healthcare experiences in the metaverse. MMLMH’s successful functioning in such complicated circumstances suggests that it can combine and process information streams from several sources. They can be successfully utilized in next-generation healthcare delivery through virtual reality. © 2025 Jianhui Lv et al.},
  keywords  = {Adaptive fusion, Collaborative representations, Diagnosis, Electronic health record, Generative adversarial networks, Health care application, Healthcare environments, Immersive, Learning frameworks, Metaverses, Multi-modal, Multi-modal learning, Performance},
  pubstate  = {published},
  tppubtype = {article}
}
2024
Wang, Y.; Zhang, Y.
Enhancing Cognitive Recall in Dementia Patients: Integrating Generative AI with Virtual Reality for Behavioral and Memory Rehabilitation Proceedings Article
In: ACM Int. Conf. Proc. Ser., pp. 86–91, Association for Computing Machinery, 2024, ISBN: 979-840071806-9.
Abstract | Links | BibTeX | Tags: AI, Cognitive rehabilitation, Cognitive stimulations, Dementia patients, Electronic health record, Firebase, Generalisation, Neurodegenerative diseases, Non visuals, Patient rehabilitation, Rehabilitation projects, Virtual environments, Virtual Reality, Virtual-reality environment, Visual memory, Visual-spatial, VR
@inproceedings{wang_enhancing_2024,
  title     = {Enhancing Cognitive Recall in Dementia Patients: Integrating Generative {AI} with Virtual Reality for Behavioral and Memory Rehabilitation},
  author    = {Wang, Y. and Zhang, Y.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85205444838&doi=10.1145%2f3686540.3686552&partnerID=40&md5=1577754660fddd936254fc78586e6a17},
  doi       = {10.1145/3686540.3686552},
  isbn      = {979-840071806-9},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {ACM Int. Conf. Proc. Ser.},
  pages     = {86--91},
  publisher = {Association for Computing Machinery},
  abstract  = {In this Project, we developed a cognitive rehabilitation program for dementia patients, leveraging generative AI and virtual reality (VR) to evoke personal memories [4]. Integrating Open AI, DreamStudio, and Unity, our system allows patients to input descriptions, generating visual memories in a VR environment [5]. In trials, 85% of AI-generated images matched patients' expectations, although some inaccuracies arose from AI generalizations. Further validation with dementia patients is needed to assess memory recovery impacts. This novel approach modernizes Cognitive Stimulation Therapy (CST), traditionally reliant on non-visual exercises, by incorporating AI and VR to enhance memory recall and visual-spatial skills. While the world is developing more and more into Artificial Intelligence (AI) and Virtual Reality (VR), our program successfully coordinates them to help stimulate dementia patients' brains and perform the memory recall and visual spatial aspects of CST. © 2024 Copyright held by the owner/author(s).},
  keywords  = {AI, Cognitive rehabilitation, Cognitive stimulations, Dementia patients, Electronic health record, Firebase, Generalisation, Neurodegenerative diseases, Non visuals, Patient rehabilitation, Rehabilitation projects, Virtual environments, Virtual Reality, Virtual-reality environment, Visual memory, Visual-spatial, VR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Greca, A. D.; Amaro, I.; Barra, P.; Rosapepe, E.; Tortora, G.
Enhancing therapeutic engagement in Mental Health through Virtual Reality and Generative AI: A co-creation approach to trust building Proceedings Article
In: Cannataro, M.; Zheng, H.; Gao, L.; Cheng, J.; Miranda, J. L.; Zumpano, E.; Hu, X.; Cho, Y.-R.; Park, T. (Ed.): Proc. - IEEE Int. Conf. Bioinform. Biomed., BIBM, pp. 6805–6811, Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 979-835038622-6.
Abstract | Links | BibTeX | Tags: Co-creation, Electronic health record, Fundamental component, Generative adversarial networks, Generative AI, generative artificial intelligence, Immersive, Mental health, Personalized therapies, Personalized Therapy, Three-dimensional object, Trust, Trust building, Virtual environments, Virtual Reality, Virtual Reality (VR)
@inproceedings{greca_enhancing_2024,
  title     = {Enhancing therapeutic engagement in Mental Health through Virtual Reality and Generative {AI}: A co-creation approach to trust building},
  author    = {Greca, A. D. and Amaro, I. and Barra, P. and Rosapepe, E. and Tortora, G.},
  editor    = {Cannataro, M. and Zheng, H. and Gao, L. and Cheng, J. and Miranda, J. L. and Zumpano, E. and Hu, X. and Cho, Y.-R. and Park, T.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85217278235&doi=10.1109%2fBIBM62325.2024.10822177&partnerID=40&md5=ed42f7ca6a0e52e9945402e2c439a7f0},
  doi       = {10.1109/BIBM62325.2024.10822177},
  isbn      = {979-835038622-6},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {Proc. - IEEE Int. Conf. Bioinform. Biomed., BIBM},
  pages     = {6805--6811},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract  = {Trust is a fundamental component of effective therapeutic relationships, significantly influencing patient engagement and treatment outcomes in mental health care. This paper presents a preliminary study aimed at enhancing trust through the co-creation of virtual therapeutic environments using generative artificial intelligence (AI). We propose a multimodal AI model, integrated into a virtual reality (VR) platform developed in Unity, which generates three-dimensional (3D) objects from textual descriptions. This approach allows patients to actively participate in shaping their therapeutic environment, fostering a collaborative atmosphere that enhances trust between patients and therapists. The methodology is structured into four phases, combining non-immersive and immersive experiences to co-create personalized therapeutic spaces and 3D objects symbolizing emotional or psychological states. Preliminary results demonstrate the system's potential in improving the therapeutic process through the real-time creation of virtual objects that reflect patient needs, with high-quality mesh generation and semantic coherence. This work offers new possibilities for patient-centered care in mental health services, suggesting that virtual co-creation can improve therapeutic efficacy by promoting trust and emotional engagement. © 2024 IEEE.},
  keywords  = {Co-creation, Electronic health record, Fundamental component, Generative adversarial networks, Generative AI, generative artificial intelligence, Immersive, Mental health, Personalized therapies, Personalized Therapy, Three-dimensional object, Trust, Trust building, Virtual environments, Virtual Reality, Virtual Reality (VR)},
  pubstate  = {published},
  tppubtype = {inproceedings}
}