AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTex record for each paper.
2025
Weerasinghe, M.; Kljun, M.; Pucihar, K. Č.
A Cross-Device Interaction with the Smartphone and HMD for Vocabulary Learning Proceedings Article
In: L., Zaina; J.C., Campos; D., Spano; K., Luyten; P., Palanque; G., Veer; A., Ebert; S.R., Humayoun; V., Memmesheimer (Ed.): Lect. Notes Comput. Sci., pp. 269–282, Springer Science and Business Media Deutschland GmbH, 2025, ISBN: 03029743 (ISSN); 978-303191759-2 (ISBN).
Abstract | Links | BibTeX | Tags: Augmented Reality, Context-based, Context-based vocabulary learning, Cross-reality interaction, Engineering education, Head-mounted displays, Head-mounted-displays, Images synthesis, Keyword method, Mixed reality, Smart phones, Smartphones, Students, Text-to-image synthesis, Visualization, Vocabulary learning
@inproceedings{weerasinghe_cross-device_2025,
  title     = {A Cross-Device Interaction with the Smartphone and {HMD} for Vocabulary Learning},
  author    = {Weerasinghe, M. and Kljun, M. and Pucihar, K. Č.},
  editor    = {Zaina, L. and Campos, J. C. and Spano, D. and Luyten, K. and Palanque, P. and Veer, G. and Ebert, A. and Humayoun, S. R. and Memmesheimer, V.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105007828696&doi=10.1007%2f978-3-031-91760-8_18&partnerID=40&md5=4ebf202715ba880dcfeb3232dba7e2c4},
  doi       = {10.1007/978-3-031-91760-8_18},
  isbn      = {978-3-031-91759-2},
  issn      = {0302-9743},
  year      = {2025},
  date      = {2025-01-01},
  booktitle = {Lect. Notes Comput. Sci.},
  series    = {Lecture Notes in Computer Science},
  volume    = {15518},
  pages     = {269--282},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  abstract  = {Cross-reality (XR) systems facilitate interaction between devices with differing levels of virtual content. By engaging with a variety of such devices, XR systems offer the flexibility to choose the most suitable modality for specific task or context. This capability enables rich applications in training and education, including vocabulary learning. Vocabulary acquisition is a vital part of language learning, employing techniques such as words rehearsing, flashcards, labelling environments with post-it notes, and mnemonic strategies such as the keyword method. Traditional mnemonics typically rely on visual stimuli or mental visualisations. Recent research highlights that AR can enhance vocabulary learning by combining real objects with augmented stimuli such as in labelling environments. Additionally, advancements in generative AI now enable high-quality, synthetically generated images from text descriptions, facilitating externalisation of personalised visual stimuli of mental visualisations. However, creating interfaces for effective real-world augmentation remains challenging, particularly given the limited text input capabilities of Head-Mounted Displays (HMDs). This work presents an XR system that combines smartphones and HMDs by leveraging Augmented Reality (AR) for contextually relevant information and a smartphone for efficient text input. The system enables users to visually annotate objects with personalised images of keyword associations generated with DALL-E 2. To evaluate the system, we conducted a user study with 16 university graduate students, assessing both usability and overall user experience. © The Author(s), under exclusive license to Springer Nature Switzerland AG 2025.},
  keywords  = {Augmented Reality, Context-based, Context-based vocabulary learning, Cross-reality interaction, Engineering education, Head-mounted displays, Head-mounted-displays, Images synthesis, Keyword method, Mixed reality, Smart phones, Smartphones, Students, Text-to-image synthesis, Visualization, Vocabulary learning},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Buldu, K. B.; Özdel, S.; Lau, K. H. Carrie; Wang, M.; Saad, D.; Schönborn, S.; Boch, A.; Kasneci, E.; Bozkir, E.
CUIfy the XR: An Open-Source Package to Embed LLM-Powered Conversational Agents in XR Proceedings Article
In: Proc. - IEEE Int. Conf. Artif. Intell. Ext. Virtual Real., AIxVR, pp. 192–197, Institute of Electrical and Electronics Engineers Inc., 2025, ISBN: 979-833152157-8 (ISBN).
Abstract | Links | BibTeX | Tags: Augmented Reality, Computational Linguistics, Conversational user interface, conversational user interfaces, Extended reality, Head-mounted-displays, Helmet mounted displays, Language Model, Large language model, large language models, Non-player character, non-player characters, Open source software, Personnel training, Problem oriented languages, Speech models, Speech-based interaction, Text to speech, Unity, Virtual environments, Virtual Reality
@inproceedings{buldu_cuify_2025,
  title     = {{CUIfy} the {XR}: An Open-Source Package to Embed {LLM}-Powered Conversational Agents in {XR}},
  author    = {Buldu, K. B. and Özdel, S. and Lau, K. H. Carrie and Wang, M. and Saad, D. and Schönborn, S. and Boch, A. and Kasneci, E. and Bozkir, E.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105000229165&doi=10.1109%2fAIxVR63409.2025.00037&partnerID=40&md5=837b0e3425d2e5a9358bbe6c8ecb5754},
  doi       = {10.1109/AIxVR63409.2025.00037},
  isbn      = {979-8-3315-2157-8},
  year      = {2025},
  date      = {2025-01-01},
  booktitle = {Proc. - IEEE Int. Conf. Artif. Intell. Ext. Virtual Real., AIxVR},
  pages     = {192--197},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract  = {Recent developments in computer graphics, machine learning, and sensor technologies enable numerous opportunities for extended reality (XR) setups for everyday life, from skills training to entertainment. With large corporations offering affordable consumer-grade head-mounted displays (HMDs), XR will likely become pervasive, and HMDs will develop as personal devices like smartphones and tablets. However, having intelligent spaces and naturalistic interactions in XR is as important as technological advances so that users grow their engagement in virtual and augmented spaces. To this end, large language model (LLM)-powered non-player characters (NPCs) with speech-to-text (STT) and text-to-speech (TTS) models bring significant advantages over conventional or pre-scripted NPCs for facilitating more natural conversational user interfaces (CUIs) in XR. This paper provides the community with an open-source, customizable, extendable, and privacy-aware Unity package, CUIfy, that facilitates speech-based NPC-user interaction with widely used LLMs, STT, and TTS models. Our package also supports multiple LLM-powered NPCs per environment and minimizes latency between different computational models through streaming to achieve usable interactions between users and NPCs. We publish our source code in the following repository: https://gitlab.lrz.de/hctl/cuify © 2025 IEEE.},
  keywords  = {Augmented Reality, Computational Linguistics, Conversational user interface, conversational user interfaces, Extended reality, Head-mounted-displays, Helmet mounted displays, Language Model, Large language model, large language models, Non-player character, non-player characters, Open source software, Personnel training, Problem oriented languages, Speech models, Speech-based interaction, Text to speech, Unity, Virtual environments, Virtual Reality},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2024
Wong, A.; Zhao, Y.; Baghaei, N.
Effects of Customizable Intelligent VR Shopping Assistant on Shopping for Stress Relief Proceedings Article
In: U., Eck; M., Sra; J., Stefanucci; M., Sugimoto; M., Tatzgern; I., Williams (Ed.): Proc. - IEEE Int. Symp. Mixed Augment. Real. Adjunct, ISMAR-Adjunct, pp. 304–308, Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 979-833150691-9 (ISBN).
Abstract | Links | BibTeX | Tags: Customisation, Customizable, generative artificial intelligence, Head-mounted-displays, Helmet mounted displays, Immersive, Mental health, mHealth, Realistic rendering, stress, Stress relief, Users' experiences, Virtual environments, Virtual Reality, Virtual shopping, Virtual shopping assistant
@inproceedings{wong_effects_2024,
  title     = {Effects of Customizable Intelligent {VR} Shopping Assistant on Shopping for Stress Relief},
  author    = {Wong, A. and Zhao, Y. and Baghaei, N.},
  editor    = {Eck, U. and Sra, M. and Stefanucci, J. and Sugimoto, M. and Tatzgern, M. and Williams, I.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85214427097&doi=10.1109%2fISMAR-Adjunct64951.2024.00069&partnerID=40&md5=1530bc0a2139fb33b1a2917c3eb31296},
  doi       = {10.1109/ISMAR-Adjunct64951.2024.00069},
  isbn      = {979-8-3315-0691-9},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {Proc. - IEEE Int. Symp. Mixed Augment. Real. Adjunct, ISMAR-Adjunct},
  pages     = {304--308},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract  = {Shopping has long since been a method of distraction and relieving stress. Virtual Reality (VR) effectively simulates immersive experiences, including shopping through head-mounted displays (HMD), which create an environment through realistic renderings and sounds. Current studies in VR have shown that assistants can support users by reducing stress, indicating their ability to improve mental health within VR. Customization and personalization have also been used to enhance the user experience with users preferring the tailored experience and leading to a greater sense of immersion. There is a gap in knowledge on the effects of customization on a VR assistant's ability to reduce stress within the VR retailing space. This research aims to identify relationships between customization and shopping assistants within VR to better understand its effects on the user experience. Understanding this will help the development of VR assistants for mental health and consumer-ready VR shopping experiences. © 2024 IEEE.},
  keywords  = {Customisation, Customizable, generative artificial intelligence, Head-mounted-displays, Helmet mounted displays, Immersive, Mental health, mHealth, Realistic rendering, stress, Stress relief, Users' experiences, Virtual environments, Virtual Reality, Virtual shopping, Virtual shopping assistant},
  pubstate  = {published},
  tppubtype = {inproceedings}
}