AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2025
Casas, L.; Hannah, S.; Mitchell, K.
HoloJig: Interactive Spoken Prompt Specified Generative AI Environments Journal Article
In: IEEE Computer Graphics and Applications, vol. 45, no. 2, pp. 69–77, 2025, ISSN: 02721716 (ISSN).
Abstract | Links | BibTeX | Tags: 3-D rendering, Article, Collaborative workspace, customer experience, Economic and social effects, generative artificial intelligence, human, Immersive, Immersive environment, parallax, Real- time, simulation, Simulation training, speech, Time based, Virtual environments, Virtual Reality, Virtual reality experiences, Virtual spaces, VR systems
@article{casas_holojig_2025,
  title    = {{HoloJig}: Interactive Spoken Prompt Specified Generative {AI} Environments},
  author   = {Casas, L. and Hannah, S. and Mitchell, K.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105001182100&doi=10.1109%2fMCG.2025.3553780&partnerID=40&md5=ec5dc44023314b6f9221169357d81dcd},
  doi      = {10.1109/MCG.2025.3553780},
  issn     = {0272-1716},
  year     = {2025},
  date     = {2025-01-01},
  journal  = {IEEE Computer Graphics and Applications},
  volume   = {45},
  number   = {2},
  pages    = {69--77},
  abstract = {HoloJig offers an interactive, speech-to-virtual reality (VR), VR experience that generates diverse environments in real time based on live spoken descriptions. Unlike traditional VR systems that rely on prebuilt assets, HoloJig dynamically creates personalized and immersive virtual spaces with depth-based parallax 3-D rendering, allowing users to define the characteristics of their immersive environment through verbal prompts. This generative approach opens up new possibilities for interactive experiences, including simulations, training, collaborative workspaces, and entertainment. In addition to speech-to-VR environment generation, a key innovation of HoloJig is its progressive visual transition mechanism, which smoothly dissolves between previously generated and newly requested environments, mitigating the delay caused by neural computations. This feature ensures a seamless and continuous user experience, even as new scenes are being rendered on remote servers. © 1981-2012 IEEE.},
  keywords = {3-D rendering, Article, Collaborative workspace, customer experience, Economic and social effects, generative artificial intelligence, human, Immersive, Immersive environment, parallax, Real- time, simulation, Simulation training, speech, Time based, Virtual environments, Virtual Reality, Virtual reality experiences, Virtual spaces, VR systems},
  pubstate = {published},
  tppubtype = {article}
}
Logothetis, I.; Diakogiannis, K.; Vidakis, N.
Interactive Learning Through Conversational Avatars and Immersive VR: Enhancing Diabetes Education and Self-Management Proceedings Article
In: X., Fang (Ed.): Lect. Notes Comput. Sci., pp. 415–429, Springer Science and Business Media Deutschland GmbH, 2025, ISBN: 03029743 (ISSN); 978-303192577-1 (ISBN).
Abstract | Links | BibTeX | Tags: Artificial intelligence, Chronic disease, Computer aided instruction, Diabetes Education, Diagnosis, E-Learning, Education management, Engineering education, Gamification, Immersive virtual reality, Interactive computer graphics, Interactive learning, Large population, Learning systems, NUI, Self management, Serious game, Serious games, simulation, Virtual Reality
@inproceedings{logothetis_interactive_2025,
  title     = {Interactive Learning Through Conversational Avatars and Immersive {VR}: Enhancing Diabetes Education and Self-Management},
  author    = {Logothetis, I. and Diakogiannis, K. and Vidakis, N.},
  editor    = {Fang, X.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105008266480&doi=10.1007%2f978-3-031-92578-8_27&partnerID=40&md5=451274dfa3ef0b3f1b39c7d5a665ee3b},
  doi       = {10.1007/978-3-031-92578-8_27},
  isbn      = {978-3-031-92577-1},
  issn      = {0302-9743},
  year      = {2025},
  date      = {2025-01-01},
  booktitle = {Lecture Notes in Computer Science},
  volume    = {15816},
  pages     = {415--429},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  abstract  = {Diabetes is a chronic disease affecting a large population of the world. Education and self-management of diabetes are crucial. Technologies such as Virtual Reality (VR) have presented promising results in healthcare education, while studies suggest that Artificial Intelligence (AI) can help in learning by further engaging the learner. This study aims to educate users on the entire routine of managing diabetes. The serious game utilizes VR for realistic interaction with diabetes tools and generative AI through a conversational avatar that acts as an assistant instructor. In this way, it allows users to practice diagnostic and therapeutic interventions in a controlled virtual environment, helping to build their understanding and confidence in diabetes management. To measure the effects of the proposed serious game, presence, and perceived agency were measured. Preliminary results indicate that this setup aids in the engagement and immersion of learners, while the avatar can provide helpful information during gameplay. © The Author(s), under exclusive license to Springer Nature Switzerland AG 2025.},
  keywords  = {Artificial intelligence, Chronic disease, Computer aided instruction, Diabetes Education, Diagnosis, E-Learning, Education management, Engineering education, Gamification, Immersive virtual reality, Interactive computer graphics, Interactive learning, Large population, Learning systems, NUI, Self management, Serious game, Serious games, simulation, Virtual Reality},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2024
Stuart, J.; Stephen, A.; Aul, K.; Bumbach, M. D.; Huffman, S.; Russo, B.; Lok, B.
Developing augmented reality filters to display visual cues on diverse skin tones Journal Article
In: Frontiers in Virtual Reality, vol. 5, 2024, ISSN: 26734192 (ISSN).
Abstract | Links | BibTeX | Tags: Augmented Reality, fidelity, Healthcare, realism, simulation, symptoms, visual cue training
@article{stuart_developing_2024,
  title    = {Developing augmented reality filters to display visual cues on diverse skin tones},
  author   = {Stuart, J. and Stephen, A. and Aul, K. and Bumbach, M. D. and Huffman, S. and Russo, B. and Lok, B.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85198640398&doi=10.3389%2ffrvir.2024.1363193&partnerID=40&md5=33470de917a5e9979f77fd42f25614eb},
  doi      = {10.3389/frvir.2024.1363193},
  issn     = {2673-4192},
  year     = {2024},
  date     = {2024-01-01},
  journal  = {Frontiers in Virtual Reality},
  volume   = {5},
  abstract = {Introduction: Variations in skin tone can significantly alter the appearance of symptoms such as rashes or bruises. Unfortunately, previous works utilizing Augmented Reality (AR) in simulating visual symptoms have often failed to consider this critical aspect, potentially leading to inadequate training and education. This study seeks to address this gap by integrating generative artificial intelligence (AI) into the AR filter design process. Methods: We conducted a 2 × 5 within-subjects study with second-year nursing students (N = 117) from the University of Florida. The study manipulated two factors: symptom generation style and skin tone. Symptom generation style was manipulated using a filter based on a real symptom image or a filter based on a computer-generated symptom image. Skin tone variations were created by applying AR filters to computer-generated images of faces with five skin tones ranging from light to dark. To control for factors like lighting or 3D tracking, 101 pre-generated images were created for each condition, representing a range of filter transparency levels (0–100). Participants used visual analog scales on a computer screen to adjust the symptom transparency in the images until they observed image changes and distinct symptom patterns. Participants also rated the realism of each condition and provided feedback on how the symptom style and skin tone impacted their perceptions. Results: Students rated the symptoms displayed by the computer-generated AR filters as marginally more realistic than those displayed by the real image AR filters. However, students identified symptoms earlier with the real-image filters. Additionally, SET-M and Theory of Planned Behavior questions indicate that the activity increased students’ feelings of confidence and self-efficacy. Finally, we found that similar to the real world, where symptoms on dark skin tones are identified at later stages of development, students identified symptoms at later stages as skin tone darkened regardless of cue type. Conclusion: This work implemented a novel approach to develop AR filters that display time-based visual cues on diverse skin tones. Additionally, this work provides evidence-based recommendations on how and when generative AI-based AR filters can be effectively used in healthcare education. Copyright © 2024 Stuart, Stephen, Aul, Bumbach, Huffman, Russo and Lok.},
  keywords = {Augmented Reality, fidelity, Healthcare, realism, simulation, symptoms, visual cue training},
  pubstate = {published},
  tppubtype = {article}
}
Shrestha, A.; Imamoto, K.
Generative AI based industrial metaverse creation methodology Proceedings Article
In: Proc. - Artif. Intell. Bus., AIxB, pp. 53–57, Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 979-835039103-9 (ISBN).
Abstract | Links | BibTeX | Tags: Generative adversarial networks, Generative AI, Industrial metaverse, Industrial railroads, Investments, Maintenance and operation, Metaverses, Natural languages, Railroad transportation, Railway, Railway maintenance, Railway operations, Simple++, simulation
@inproceedings{shrestha_generative_2024,
  title     = {Generative {AI} based industrial metaverse creation methodology},
  author    = {Shrestha, A. and Imamoto, K.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85215066217&doi=10.1109%2fAIxB62249.2024.00017&partnerID=40&md5=d6d11729f16ccaa9f69fd5452befe492},
  doi       = {10.1109/AIxB62249.2024.00017},
  isbn      = {979-8-3503-9103-9},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {Proc. - Artif. Intell. Bus., AIxB},
  pages     = {53--57},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract  = {The metaverse has been proposed as a suitable apparatus for the dissemination of information in a railway maintenance and operation context. However, the generation of such a metaverse environment requires significant investment with the creation of simple prototypes taking an extended duration. Although there are generative artificial intelligencebased methods to create small scenes, there is an absence of a method to do so for industrial applications. We devised a platform to create railway environments with the assistance of the language models for code creation and semantic inference without the need for reprogramming or editing of the project source meaning environments could be generated by the end users. With a natural language input and a coding paradigm output the code generation module is shown together with the example environments from real-life railway lines in Tokyo, Japan as preliminary results. By creating such environments leveraging the rapid generation with the help of generative artificial intelligence, we show generative artificial intelligence can be used to automate the task of the programmer to create new environments on demand from the user in natural language. © 2024 IEEE.},
  keywords  = {Generative adversarial networks, Generative AI, Industrial metaverse, Industrial railroads, Investments, Maintenance and operation, Metaverses, Natural languages, Railroad transportation, Railway, Railway maintenance, Railway operations, Simple++, simulation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Chandrashekar, N. Donekal; Lee, A.; Azab, M.; Gracanin, D.
Understanding User Behavior for Enhancing Cybersecurity Training with Immersive Gamified Platforms Journal Article
In: Information (Switzerland), vol. 15, no. 12, 2024, ISSN: 20782489 (ISSN).
Abstract | Links | BibTeX | Tags: Artificial intelligence, Critical infrastructures, Cyber attacks, Cyber security, Cyber systems, Cyber-attacks, Cybersecurity, Decisions makings, Digital infrastructures, digital twin, Extended reality, Gamification, Immersive, Network Security, simulation, Technical vulnerabilities, Training, user behavior, User behaviors
@article{donekal_chandrashekar_understanding_2024,
  title    = {Understanding User Behavior for Enhancing Cybersecurity Training with Immersive Gamified Platforms},
  author   = {Donekal Chandrashekar, N. and Lee, A. and Azab, M. and Gracanin, D.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85213435167&doi=10.3390%2finfo15120814&partnerID=40&md5=134c43c7238bae4923468bc6e46c860d},
  doi      = {10.3390/info15120814},
  issn     = {2078-2489},
  year     = {2024},
  date     = {2024-01-01},
  journal  = {Information (Switzerland)},
  volume   = {15},
  number   = {12},
  abstract = {In modern digital infrastructure, cyber systems are foundational, making resilience against sophisticated attacks essential. Traditional cybersecurity defenses primarily address technical vulnerabilities; however, the human element, particularly decision-making during cyber attacks, adds complexities that current behavioral studies fail to capture adequately. Existing approaches, including theoretical models, game theory, and simulators, rely on retrospective data and static scenarios. These methods often miss the real-time, context-specific nature of user responses during cyber threats. To address these limitations, this work introduces a framework that combines Extended Reality (XR) and Generative Artificial Intelligence (Gen-AI) within a gamified platform. This framework enables continuous, high-fidelity data collection on user behavior in dynamic attack scenarios. It includes three core modules: the Player Behavior Module (PBM), Gamification Module (GM), and Simulation Module (SM). Together, these modules create an immersive, responsive environment for studying user interactions. A case study in a simulated critical infrastructure environment demonstrates the framework’s effectiveness in capturing realistic user behaviors under cyber attack, with potential applications for improving response strategies and resilience across critical sectors. This work lays the foundation for adaptive cybersecurity training and user-centered development across critical infrastructure. © 2024 by the authors.},
  keywords = {Artificial intelligence, Critical infrastructures, Cyber attacks, Cyber security, Cyber systems, Cyber-attacks, Cybersecurity, Decisions makings, Digital infrastructures, digital twin, Extended reality, Gamification, Immersive, Network Security, simulation, Technical vulnerabilities, Training, user behavior, User behaviors},
  pubstate = {published},
  tppubtype = {article}
}
Scott, A. J. S.; McCuaig, F.; Lim, V.; Watkins, W.; Wang, J.; Strachan, G.
Revolutionizing Nurse Practitioner Training: Integrating Virtual Reality and Large Language Models for Enhanced Clinical Education Proceedings Article
In: G., Strudwick; N.R., Hardiker; G., Rees; R., Cook; R., Cook; Y.J., Lee (Ed.): Stud. Health Technol. Informatics, pp. 671–672, IOS Press BV, 2024, ISBN: 09269630 (ISSN); 978-164368527-4 (ISBN).
Abstract | Links | BibTeX | Tags: 3D modeling, 3D models, 3d-modeling, adult, anamnesis, clinical decision making, clinical education, Clinical Simulation, Computational Linguistics, computer interface, Computer-Assisted Instruction, conference paper, Curriculum, Decision making, E-Learning, Education, Health care education, Healthcare Education, human, Humans, Language Model, Large language model, large language models, Mesh generation, Model animations, Modeling languages, nurse practitioner, Nurse Practitioners, Nursing, nursing education, nursing student, OSCE preparation, procedures, simulation, Teaching, therapy, Training, Training program, User-Computer Interface, Virtual Reality, Virtual reality training
@inproceedings{scott_revolutionizing_2024,
  title     = {Revolutionizing Nurse Practitioner Training: Integrating Virtual Reality and Large Language Models for Enhanced Clinical Education},
  author    = {Scott, A. J. S. and McCuaig, F. and Lim, V. and Watkins, W. and Wang, J. and Strachan, G.},
  editor    = {Strudwick, G. and Hardiker, N. R. and Rees, G. and Cook, R. and Lee, Y. J.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85199593781&doi=10.3233%2fSHTI240272&partnerID=40&md5=90c7bd43ba978f942723e6cf1983ffb3},
  doi       = {10.3233/SHTI240272},
  isbn      = {978-1-64368-527-4},
  issn      = {0926-9630},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {Studies in Health Technology and Informatics},
  volume    = {315},
  pages     = {671--672},
  publisher = {IOS Press BV},
  abstract  = {This project introduces an innovative virtual reality (VR) training program for student Nurse Practitioners, incorporating advanced 3D modeling, animation, and Large Language Models (LLMs). Designed to simulate realistic patient interactions, the program aims to improve communication, history taking, and clinical decision-making skills in a controlled, authentic setting. This abstract outlines the methods, results, and potential impact of this cutting-edge educational tool on nursing education. © 2024 The Authors.},
  keywords  = {3D modeling, 3D models, 3d-modeling, adult, anamnesis, clinical decision making, clinical education, Clinical Simulation, Computational Linguistics, computer interface, Computer-Assisted Instruction, conference paper, Curriculum, Decision making, E-Learning, Education, Health care education, Healthcare Education, human, Humans, Language Model, Large language model, large language models, Mesh generation, Model animations, Modeling languages, nurse practitioner, Nurse Practitioners, Nursing, nursing education, nursing student, OSCE preparation, procedures, simulation, Teaching, therapy, Training, Training program, User-Computer Interface, Virtual Reality, Virtual reality training},
  pubstate  = {published},
  tppubtype = {inproceedings}
}