AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTex record for each paper.
2025
Koizumi, M.; Ohsuga, M.; Corchado, J. M.
Development and Assessment of a System to Help Students Improve Self-compassion Proceedings Article
In: Chinthaginjala, R.; Sitek, P.; Min-Allah, N.; Matsui, K.; Ossowski, S.; Rodríguez, S. (Ed.): Lect. Notes Networks Syst., pp. 43–52, Springer Science and Business Media Deutschland GmbH, 2025, ISSN: 23673370; ISBN: 978-303182072-4.
Abstract | Links | BibTeX | Tags: Avatar, Generative adversarial networks, Generative AI, Health issues, Mental health, Self-compassion, Students, Training program, University students, Virtual avatar, Virtual environments, Virtual Reality, Virtual Space, Virtual spaces, Visual imagery
@inproceedings{koizumi_development_2025,
title = {Development and Assessment of a System to Help Students Improve Self-compassion},
author = {M. Koizumi and M. Ohsuga and J. M. Corchado},
editor = {Chinthaginjala R. and Sitek P. and Min-Allah N. and Matsui K. and Ossowski S. and Rodríguez S.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85218979175&doi=10.1007%2f978-3-031-82073-1_5&partnerID=40&md5=b136d4a114ce5acfa89f907ccecc145f},
doi = {10.1007/978-3-031-82073-1_5},
isbn = {23673370 (ISSN); 978-303182072-4 (ISBN)},
year = {2025},
date = {2025-01-01},
booktitle = {Lect. Notes Networks Syst.},
volume = {1259},
pages = {43–52},
publisher = {Springer Science and Business Media Deutschland GmbH},
abstract = {Mental health issues are becoming more prevalent among university students. The mindful self-compassion (MSC) training program, which was introduced to address this issue, has shown some efficacy. However, many people, particularly Japanese people, have difficulty recalling visual imagery or feel uncomfortable or resistant to treating themselves with compassion. This study proposes and develops a system that uses virtual space and avatars to help individuals improve their self-compassion. In the proposed system, the user first selects an avatar of a person with whom to talk (hereafter referred to as “partner”), and then talks about the problem to the avatar of his/her choice. Next, the user changes viewpoints and listens to the problem as the partner’s avatar and responds with compassion. Finally, the user returns to his/her own avatar and listens to the compassionate response spoken as the partner avatar. We first conducted surveys to identify the important system components, and then developed prototypes. In light of the experimental results, we improved the prototype by introducing generative AI. The first prototype presented the user’s spoken voice as it was, whereas the improved system uses generative AI to reorganize the spoken content before presenting it. In addition, we added a function that generates and appends compassionate advice. The proposed system is expected to contribute to the improvement of students’ self-compassion. © The Author(s), under exclusive license to Springer Nature Switzerland AG 2025.},
keywords = {Avatar, Generative adversarial networks, Generative AI, Health issues, Mental health, Self-compassion, Students, Training program, University students, Virtual avatar, Virtual environments, Virtual Reality, Virtual Space, Virtual spaces, Visual imagery},
pubstate = {published},
tppubtype = {inproceedings}
}
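The improved prototype's generative-AI step could be approximated as follows; this is a minimal Python sketch, not the authors' code, and the model name and prompt wording are assumptions:

from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

def compassionate_rewrite(transcript: str) -> str:
    """Reorganize the user's spoken problem and append compassionate advice."""
    response = client.chat.completions.create(
        model="gpt-4o-mini",  # hypothetical choice; the paper does not name a model
        messages=[
            {"role": "system",
             "content": ("Rewrite the speaker's account of their problem into a "
                         "clear, organized summary, then add a short piece of "
                         "compassionate advice addressed to the speaker.")},
            {"role": "user", "content": transcript},
        ],
    )
    return response.choices[0].message.content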
Li, Y.; Pang, E. C. H.; Ng, C. S. Y.; Azim, M.; Leung, H.
Enhancing Linear Algebra Education with AI-Generated Content in the CityU Metaverse: A Comparative Study Proceedings Article
In: Hao, T.; Wu, J.G.; Luo, X.; Sun, Y.; Mu, Y.; Ge, S.; Xie, W. (Ed.): Lect. Notes Comput. Sci., pp. 3–16, Springer Science and Business Media Deutschland GmbH, 2025, ISSN: 03029743; ISBN: 978-981964406-3.
Abstract | Links | BibTeX | Tags: Comparatives studies, Digital age, Digital interactions, digital twin, Educational metaverse, Engineering education, Generative AI, Immersive, Matrix algebra, Metaverse, Metaverses, Personnel training, Students, Teaching, University campus, Virtual environments, virtual learning environment, Virtual learning environments, Virtual Reality, Virtualization
@inproceedings{li_enhancing_2025,
title = {Enhancing Linear Algebra Education with AI-Generated Content in the CityU Metaverse: A Comparative Study},
author = {Y. Li and E. C. H. Pang and C. S. Y. Ng and M. Azim and H. Leung},
editor = {Hao T. and Wu J.G. and Luo X. and Sun Y. and Mu Y. and Ge S. and Xie W.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003632691&doi=10.1007%2f978-981-96-4407-0_1&partnerID=40&md5=c067ba5d4c15e9c0353bf315680531fc},
doi = {10.1007/978-981-96-4407-0_1},
isbn = {03029743 (ISSN); 978-981964406-3 (ISBN)},
year = {2025},
date = {2025-01-01},
booktitle = {Lect. Notes Comput. Sci.},
volume = {15589 LNCS},
pages = {3–16},
publisher = {Springer Science and Business Media Deutschland GmbH},
abstract = {In today’s digital age, the metaverse is emerging as the forthcoming evolution of the internet. It provides an immersive space that marks a new frontier in the way digital interactions are facilitated and experienced. In this paper, we present the CityU Metaverse, which aims to construct a digital twin of our university campus. It is designed as an educational virtual world where learning applications can be embedded in this virtual campus, supporting not only remote and collaborative learning but also professional technical training to enhance educational experiences through immersive and interactive learning. To evaluate the effectiveness of this educational metaverse, we conducted an experiment focused on 3D linear transformation in linear algebra, with teaching content generated by generative AI, comparing our metaverse system with traditional teaching methods. Knowledge tests and surveys assessing learning interest revealed that students engaged with the CityU Metaverse, facilitated by AI-generated content, outperformed those in traditional settings and reported greater enjoyment during the learning process. The work provides valuable perspectives on the behaviors and interactions within the metaverse by analyzing user preferences and learning outcomes. © The Author(s), under exclusive license to Springer Nature Singapore Pte Ltd. 2025.},
keywords = {Comparatives studies, Digital age, Digital interactions, digital twin, Educational metaverse, Engineering education, Generative AI, Immersive, Matrix algebra, Metaverse, Metaverses, Personnel training, Students, Teaching, University campus, Virtual environments, virtual learning environment, Virtual learning environments, Virtual Reality, Virtualization},
pubstate = {published},
tppubtype = {inproceedings}
}
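For readers unfamiliar with the lesson topic, the experiment centred on 3D linear transformations; the following NumPy example of one such transformation is purely illustrative and not taken from the paper:

import numpy as np

theta = np.pi / 4  # rotate 45 degrees about the z-axis
Rz = np.array([
    [np.cos(theta), -np.sin(theta), 0.0],
    [np.sin(theta),  np.cos(theta), 0.0],
    [0.0,            0.0,           1.0],
])
v = np.array([1.0, 0.0, 0.0])
print(Rz @ v)  # [0.7071... 0.7071... 0.]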
Weerasinghe, M.; Kljun, M.; Pucihar, K. Č.
A Cross-Device Interaction with the Smartphone and HMD for Vocabulary Learning Proceedings Article
In: Zaina, L.; Campos, J.C.; Spano, D.; Luyten, K.; Palanque, P.; Veer, G.; Ebert, A.; Humayoun, S.R.; Memmesheimer, V. (Ed.): Lect. Notes Comput. Sci., pp. 269–282, Springer Science and Business Media Deutschland GmbH, 2025, ISSN: 03029743; ISBN: 978-303191759-2.
Abstract | Links | BibTeX | Tags: Augmented Reality, Context-based, Context-based vocabulary learning, Cross-reality interaction, Engineering education, Head-mounted displays, Head-mounted-displays, Images synthesis, Keyword method, Mixed reality, Smart phones, Smartphones, Students, Text-to-image synthesis, Visualization, Vocabulary learning
@inproceedings{weerasinghe_cross-device_2025,
title = {A Cross-Device Interaction with the Smartphone and HMD for Vocabulary Learning},
author = {M. Weerasinghe and M. Kljun and K. Č. Pucihar},
editor = {Zaina L. and Campos J.C. and Spano D. and Luyten K. and Palanque P. and Veer G. and Ebert A. and Humayoun S.R. and Memmesheimer V.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105007828696&doi=10.1007%2f978-3-031-91760-8_18&partnerID=40&md5=4ebf202715ba880dcfeb3232dba7e2c4},
doi = {10.1007/978-3-031-91760-8_18},
isbn = {03029743 (ISSN); 978-303191759-2 (ISBN)},
year = {2025},
date = {2025-01-01},
booktitle = {Lect. Notes Comput. Sci.},
volume = {15518 LNCS},
pages = {269–282},
publisher = {Springer Science and Business Media Deutschland GmbH},
abstract = {Cross-reality (XR) systems facilitate interaction between devices with differing levels of virtual content. By engaging with a variety of such devices, XR systems offer the flexibility to choose the most suitable modality for a specific task or context. This capability enables rich applications in training and education, including vocabulary learning. Vocabulary acquisition is a vital part of language learning, employing techniques such as word rehearsal, flashcards, labelling environments with post-it notes, and mnemonic strategies such as the keyword method. Traditional mnemonics typically rely on visual stimuli or mental visualisations. Recent research highlights that AR can enhance vocabulary learning by combining real objects with augmented stimuli, as in labelling environments. Additionally, advancements in generative AI now enable high-quality, synthetically generated images from text descriptions, facilitating the externalisation of personalised visual stimuli from mental visualisations. However, creating interfaces for effective real-world augmentation remains challenging, particularly given the limited text input capabilities of Head-Mounted Displays (HMDs). This work presents an XR system that combines smartphones and HMDs by leveraging Augmented Reality (AR) for contextually relevant information and a smartphone for efficient text input. The system enables users to visually annotate objects with personalised images of keyword associations generated with DALL-E 2. To evaluate the system, we conducted a user study with 16 university graduate students, assessing both usability and overall user experience. © The Author(s), under exclusive license to Springer Nature Switzerland AG 2025.},
keywords = {Augmented Reality, Context-based, Context-based vocabulary learning, Cross-reality interaction, Engineering education, Head-mounted displays, Head-mounted-displays, Images synthesis, Keyword method, Mixed reality, Smart phones, Smartphones, Students, Text-to-image synthesis, Visualization, Vocabulary learning},
pubstate = {published},
tppubtype = {inproceedings}
}
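The text-to-image step maps onto the public DALL-E 2 API roughly as follows; this is a sketch with a hypothetical learner-entered keyword association, not the authors' implementation:

from openai import OpenAI

client = OpenAI()

result = client.images.generate(
    model="dall-e-2",  # the model named in the paper
    prompt="A cat wearing a sombrero, napping on a sofa",  # invented mnemonic association
    size="512x512",
    n=1,
)
print(result.data[0].url)  # image to texture onto the AR annotation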
Dang, B.; Huynh, L.; Gul, F.; Rosé, C.; Järvelä, S.; Nguyen, A.
Human–AI collaborative learning in mixed reality: Examining the cognitive and socio-emotional interactions Journal Article
In: British Journal of Educational Technology, 2025, ISSN: 00071013.
Abstract | Links | BibTeX | Tags: Artificial intelligence agent, Collaborative learning, Educational robots, Embodied agent, Emotional intelligence, Emotional interactions, Generative adversarial networks, generative artificial intelligence, Hierarchical clustering, Human–AI collaboration, Interaction pattern, Mixed reality, ordered network analysis, Ordered network analyze, Social behavior, Social interactions, Social psychology, Students, Supervised learning, Teaching
@article{dang_humanai_2025,
title = {Human–AI collaborative learning in mixed reality: Examining the cognitive and socio-emotional interactions},
author = {B. Dang and L. Huynh and F. Gul and C. Rosé and S. Järvelä and A. Nguyen},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105007896240&doi=10.1111%2fbjet.13607&partnerID=40&md5=b58a641069461f8880d1ee0adcf42457},
doi = {10.1111/bjet.13607},
issn = {00071013 (ISSN)},
year = {2025},
date = {2025-01-01},
journal = {British Journal of Educational Technology},
abstract = {The rise of generative artificial intelligence (GAI), especially with multimodal large language models like GPT-4o, has sparked transformative potential and challenges for learning and teaching. With potential as a cognitive offloading tool, GAI can enable learners to focus on higher-order thinking and creativity. Yet, this also raises questions about integration into traditional education due to the limited research on learners' interactions with GAI. Some studies with GAI focus on text-based human–AI interactions, while research on embodied GAI in immersive environments like mixed reality (MR) remains unexplored. To address this, this study investigates interaction dynamics between learners and embodied GAI agents in MR, examining cognitive and socio-emotional interactions during collaborative learning. We investigated the paired interactive patterns between a student and an embodied GAI agent in MR, based on data from 26 higher education students with 1317 recorded activities. Data were analysed using a multi-layered learning analytics approach, including quantitative content analysis, sequence analysis via hierarchical clustering and pattern analysis through ordered network analysis (ONA). Our findings identified two interaction patterns: (1) the AI-led Supported Exploratory Questioning (AISQ) group and (2) the Learner-Initiated Inquiry (LII) group. Despite their distinct characteristics, both types demonstrated comparable levels of socio-emotional engagement and exhibited meaningful cognitive engagement, surpassing the superficial content reproduction that can be observed in interactions with GPT models. This study contributes to human–AI collaboration and learning studies, extending understanding to learning in MR environments and highlighting implications for designing AI-based educational tools. Practitioner notes What is already known about this topic Socio-emotional interactions are fundamental to cognitive processes and play a critical role in collaborative learning. Generative artificial intelligence (GAI) holds transformative potential for education but raises questions about how learners interact with such technology. Most existing research focuses on text-based interactions with GAI; there is limited empirical evidence on how embodied GAI agents within immersive environments like Mixed Reality (MR) influence the cognitive and socio-emotional interactions for learning and regulation. What this paper adds Provides first empirical insights into cognitive and socio-emotional interaction patterns between learners and embodied GAI agents in MR environments. Identifies two distinct interaction patterns: AISQ type (structured, guided, supportive) and LII type (inquiry-driven, exploratory, engaging), demonstrating how these patterns influence collaborative learning dynamics. Shows that both interaction types facilitate meaningful cognitive engagement, moving beyond superficial content reproduction commonly associated with GAI interactions. Implications for practice and/or policy Insights from the identified interaction patterns can inform the design of teaching strategies that effectively integrate embodied GAI agents to enhance both cognitive and socio-emotional engagement. Findings can guide the development of AI-based educational tools that capitalise on the capabilities of embodied GAI agents, supporting a balance between structured guidance and exploratory learning.
Highlights the need for ethical considerations in adopting embodied GAI agents, particularly regarding the human-like realism of these agents and potential impacts on learner dependency and interaction norms. © 2025 The Author(s). British Journal of Educational Technology published by John Wiley & Sons Ltd on behalf of British Educational Research Association.},
keywords = {Artificial intelligence agent, Collaborative learning, Educational robots, Embodied agent, Emotional intelligence, Emotional interactions, Generative adversarial networks, generative artificial intelligence, Hierarchical clustering, Human–AI collaboration, Interaction pattern, Mixed reality, ordered network analysis, Ordered network analyze, Social behavior, Social interactions, Social psychology, Students, Supervised learning, Teaching},
pubstate = {published},
tppubtype = {article}
}
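The hierarchical-clustering step can be illustrated with a simplified sketch: here learners are grouped by the frequency of coded interaction types, a simplification of the paper's sequence analysis; the data and parameters are invented:

import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster

# Hypothetical per-learner counts of coded categories
# (e.g. cognitive question, socio-emotional response, ...).
profiles = np.array([
    [12, 3, 7, 1],
    [11, 4, 6, 2],
    [2, 10, 1, 9],
    [3, 11, 2, 8],
])
Z = linkage(profiles, method="ward")
labels = fcluster(Z, t=2, criterion="maxclust")
print(labels)  # e.g. [1 1 2 2]: two interaction-pattern groups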
Zhu, X. T.; Cheerman, H.; Cheng, M.; Kiami, S. R.; Chukoskie, L.; McGivney, E.
Designing VR Simulation System for Clinical Communication Training with LLMs-Based Embodied Conversational Agents Proceedings Article
In: Conf Hum Fact Comput Syst Proc, Association for Computing Machinery, 2025, ISBN: 979-840071395-8.
Abstract | Links | BibTeX | Tags: Clinical communications, Clinical Simulation, Communications training, Curricula, Embodied conversational agent, Embodied Conversational Agents, Health professions, Intelligent virtual agents, Language Model, Medical education, Model-based OPC, Patient simulators, Personnel training, Students, Teaching, User centered design, Virtual environments, Virtual Reality, VR simulation, VR simulation systems
@inproceedings{zhu_designing_2025,
title = {Designing VR Simulation System for Clinical Communication Training with LLMs-Based Embodied Conversational Agents},
author = {X. T. Zhu and H. Cheerman and M. Cheng and S. R. Kiami and L. Chukoskie and E. McGivney},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105005754066&doi=10.1145%2f3706599.3719693&partnerID=40&md5=4468fbd54b43d6779259300afd08632e},
doi = {10.1145/3706599.3719693},
isbn = {979-840071395-8 (ISBN)},
year = {2025},
date = {2025-01-01},
booktitle = {Conf Hum Fact Comput Syst Proc},
publisher = {Association for Computing Machinery},
abstract = {VR simulation in Health Professions (HP) education demonstrates huge potential, but fixed learning content with little customization limits its application beyond lab environments. To address these limitations in the context of VR for patient communication training, we conducted a user-centered study involving semi-structured interviews with advanced HP students to understand their challenges in clinical communication training and perceptions of VR-based solutions. From this, we derived design insights emphasizing the importance of realistic scenarios, simple interactions, and unpredictable dialogues. Building on these insights, we developed the Virtual AI Patient Simulator (VAPS), a novel VR system powered by Large Language Models (LLMs) and Embodied Conversational Agents (ECAs), supporting dynamic and customizable patient interactions for immersive learning. We also provided an example of how clinical professors could use user-friendly design forms to create personalized scenarios that align with course objectives in VAPS and discuss future implications of integrating AI-driven technologies into VR education. © 2025 Copyright held by the owner/author(s).},
keywords = {Clinical communications, Clinical Simulation, Communications training, Curricula, Embodied conversational agent, Embodied Conversational Agents, Health professions, Intelligent virtual agents, Language Model, Medical education, Model-based OPC, Patient simulators, Personnel training, Students, Teaching, User centered design, Virtual environments, Virtual Reality, VR simulation, VR simulation systems},
pubstate = {published},
tppubtype = {inproceedings}
}
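The paper's design forms suggest a pipeline in which an instructor-authored scenario is compiled into the patient agent's system prompt; a minimal sketch under that assumption (field names and model choice are hypothetical, not the VAPS implementation):

from openai import OpenAI

client = OpenAI()

form = {  # hypothetical instructor-authored scenario form
    "name": "Mr. Alvarez",
    "age": 58,
    "presenting_complaint": "chest tightness when climbing stairs",
    "affect": "anxious, gives short answers unless reassured",
}

system_prompt = (
    f"You are {form['name']}, a {form['age']}-year-old patient presenting with "
    f"{form['presenting_complaint']}. You are {form['affect']}. "
    "Stay in character and never volunteer the diagnosis."
)

reply = client.chat.completions.create(
    model="gpt-4o-mini",  # hypothetical; the paper does not name a model
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": "Hello, what brings you in today?"},
    ],
)
print(reply.choices[0].message.content)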
Hassoulas, A.; Crawford, O.; Hemrom, S.; Almeida, A.; Coffey, M. J.; Hodgson, M.; Leveridge, B.; Karwa, D.; Lethbridge, A.; Williams, H.; Voisey, A.; Reed, K.; Patel, S.; Hart, K.; Shaw, H.
A pilot study investigating the efficacy of technology enhanced case based learning (CBL) in small group teaching Journal Article
In: Scientific Reports, vol. 15, no. 1, 2025, ISSN: 20452322.
Abstract | Links | BibTeX | Tags: coronavirus disease 2019, Covid-19, epidemiology, female, human, Humans, Learning, male, Medical, Medical student, Pilot Projects, pilot study, problem based learning, Problem-Based Learning, procedures, SARS-CoV-2, Severe acute respiratory syndrome coronavirus 2, Students, Teaching, Virtual Reality
@article{hassoulas_pilot_2025,
title = {A pilot study investigating the efficacy of technology enhanced case based learning (CBL) in small group teaching},
author = {A. Hassoulas and O. Crawford and S. Hemrom and A. Almeida and M. J. Coffey and M. Hodgson and B. Leveridge and D. Karwa and A. Lethbridge and H. Williams and A. Voisey and K. Reed and S. Patel and K. Hart and H. Shaw},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105004223025&doi=10.1038%2fs41598-025-99764-5&partnerID=40&md5=8588cac4c3ffe437e667ba4373e010ec},
doi = {10.1038/s41598-025-99764-5},
issn = {20452322 (ISSN)},
year = {2025},
date = {2025-01-01},
journal = {Scientific Reports},
volume = {15},
number = {1},
abstract = {The recent paradigm shift in teaching provision within higher education, following the COVID-19 pandemic, has led to blended models of learning prevailing in the pedagogic literature and in education practice. This shift has also resulted in an abundance of tools and technologies coming to market. Whilst the value of integrating technology into teaching and assessment has been well-established in the literature, the magnitude of choice available to educators and to students can be overwhelming. The current pilot investigated the feasibility of integrating key technologies in delivering technology-enhanced learning (TEL) case-based learning (CBL) within a sample of year two medical students. The cohort was selected at random, as was the control group receiving conventional CBL. Both groups were matched on prior academic performance. The TEL-CBL group received (1) in-person tutorials delivered within an immersive learning suite, (2) access to 3D anatomy software to explore during their self-directed learning time, (3) virtual reality (VR) guided anatomy exploration during tutorials, (4) access to a generative AI-based simulated virtual patient repository to practice key skills such as communication and history taking, and (5) an immersive medical emergency simulation. Metrics assessed included formative academic performance, student learning experience, and confidence in relation to communication and clinical skills. The results revealed that the TEL-CBL group outperformed their peers in successive formative assessments (p < 0.05), engaged thoroughly with the technologies at their disposal, and reported that these technologies enhanced their learning experience. Furthermore, students reported that access to the GenAI-simulated virtual patient platform and the immersive medical emergency simulation improved their clinical confidence and gave them a useful insight into what they can expect during the clinical phase of their medical education. The results are discussed in relation to the advantages that key emerging technologies may play in enhancing student performance, experience and confidence. © The Author(s) 2025.},
keywords = {coronavirus disease 2019, Covid-19, epidemiology, female, human, Humans, Learning, male, Medical, Medical student, Pilot Projects, pilot study, problem based learning, Problem-Based Learning, procedures, SARS-CoV-2, Severe acute respiratory syndrome coronavirus 2, Students, Teaching, Virtual Reality},
pubstate = {published},
tppubtype = {article}
}
Yadav, R.; Huzooree, G.; Yadav, M.; Gangodawilage, D. S. K.
Generative AI for personalized learning content creation Book Section
In: Transformative AI Practices for Personalized Learning Strategies, pp. 107–130, IGI Global, 2025, ISBN: 979-836938746-7; 979-836938744-3.
Abstract | Links | BibTeX | Tags: Adaptive feedback, Advanced Analytics, AI systems, Contrastive Learning, Educational contents, Educational experiences, Enhanced learning, Ethical technology, Federated learning, Immersive, Learning content creation, Personalized learning, Student engagement, Students, Supervised learning, Tools and applications, Virtual Reality
@incollection{yadav_generative_2025,
title = {Generative AI for personalized learning content creation},
author = {R. Yadav and G. Huzooree and M. Yadav and D. S. K. Gangodawilage},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105005387236&doi=10.4018%2f979-8-3693-8744-3.ch005&partnerID=40&md5=904e58b9c6de83dcd431c1706dda02b3},
doi = {10.4018/979-8-3693-8744-3.ch005},
isbn = {979-836938746-7 (ISBN); 979-836938744-3 (ISBN)},
year = {2025},
date = {2025-01-01},
booktitle = {Transformative AI Practices for Personalized Learning Strategies},
pages = {107–130},
publisher = {IGI Global},
abstract = {Generative AI has emerged as a transformative force in personalized learning, offering unprecedented opportunities to tailor educational content to individual needs. By leveraging advanced algorithms and data analysis, AI systems can dynamically generate customized materials, provide adaptive feedback, and foster student engagement. This chapter explores the intersection of generative AI and personalized learning, discussing its techniques, tools, and applications in creating immersive and adaptive educational experiences. Key benefits include enhanced learning outcomes, efficiency, and scalability. However, challenges such as data privacy, algorithmic bias, and equitable access must be addressed to ensure responsible implementation. Future trends, including the integration of immersive technologies like Virtual Reality (VR) and predictive analytics, highlight AI's potential to revolutionize education. By navigating ethical considerations and fostering transparency, generative AI can become a powerful ally in creating inclusive, engaging, and student-centered learning environments. © 2025, IGI Global Scientific Publishing. All rights reserved.},
keywords = {Adaptive feedback, Advanced Analytics, AI systems, Contrastive Learning, Educational contents, Educational experiences, Enhanced learning, Ethical technology, Federated learning, Immersive, Learning content creation, Personalized learning, Student engagement, Students, Supervised learning, Tools and applications, Virtual Reality},
pubstate = {published},
tppubtype = {incollection}
}
Ly, C.; Peng, E.; Liu, K.; Qin, A.; Howe, G.; Cheng, A. Y.; Cuadra, A.
Museum in the Classroom: Engaging Students with Augmented Reality Museum Artifacts and Generative AI Proceedings Article
In: Conf Hum Fact Comput Syst Proc, Association for Computing Machinery, 2025, ISBN: 979-840071395-8.
Abstract | Links | BibTeX | Tags: Artifact or System, Child/parent, Children/Parents, Digitisation, Education/Learning, Engaging students, Engineering education, Field trips, Interactive learning, Learning experiences, Rich learning experiences, Students, Teachers', Teaching
@inproceedings{ly_museum_2025,
title = {Museum in the Classroom: Engaging Students with Augmented Reality Museum Artifacts and Generative AI},
author = {C. Ly and E. Peng and K. Liu and A. Qin and G. Howe and A. Y. Cheng and A. Cuadra},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105005741934&doi=10.1145%2f3706599.3719787&partnerID=40&md5=08816dd8d41bc34a0dc2d355985e2cc4},
doi = {10.1145/3706599.3719787},
isbn = {979-840071395-8 (ISBN)},
year = {2025},
date = {2025-01-01},
booktitle = {Conf Hum Fact Comput Syst Proc},
publisher = {Association for Computing Machinery},
abstract = {Museum field trips provide a rich learning experience for children. However, they are complex and expensive for teachers to organize. Fortunately, digitization of museum artifacts makes it possible to use museum resources within the classroom. Museum in the Classroom (MITC) explores how augmented reality (AR) and generative artificial intelligence (AI) can create an interactive learning experience around museum artifacts. This iPad app allows educators to select historical topics from a curated artifact library, generating AR-based exhibits that students can explore. MITC engages students through interactive AR artifacts, AI-driven chatbots, and AI-generated quiz questions, based on a real exhibition at the Cantor Arts Center at Stanford University. A formative study with middle schoolers (N = 20) demonstrated that the app increased engagement compared to traditional learning methods. MITC also fostered a playful and comfortable environment to interact with museum artifacts. Our findings suggest that combining AR and AI has the potential to enrich classroom learning and offer a scalable alternative to traditional museum visits. © 2025 Copyright held by the owner/author(s).},
keywords = {Artifact or System, Child/parent, Children/Parents, Digitisation, Education/Learning, Engaging students, Engineering education, Field trips, Interactive learning, Learning experiences, Rich learning experiences, Students, Teachers', Teaching},
pubstate = {published},
tppubtype = {inproceedings}
}
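MITC's quiz-generation step amounts to prompting an LLM with a curated artifact record; a minimal sketch in which the record structure, prompt, and model are assumptions for illustration:

import json
from openai import OpenAI

client = OpenAI()

artifact = {  # hypothetical record from the curated artifact library
    "title": "Rodin, The Thinker (bronze cast)",
    "period": "circa 1880",
    "notes": "On view at the Cantor Arts Center, Stanford University.",
}

resp = client.chat.completions.create(
    model="gpt-4o-mini",  # hypothetical; the paper does not name a model
    messages=[{
        "role": "user",
        "content": ("Write two multiple-choice quiz questions for middle "
                    "schoolers about this artifact, as JSON with fields "
                    "'question', 'choices', and 'answer':\n" + json.dumps(artifact)),
    }],
)
print(resp.choices[0].message.content)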
2024
Harinee, S.; Raja, R. Vimal; Mugila, E.; Govindharaj, I.; Sanjaykumar, V.; Ragavendhiran, T.
Elevating Medical Training: A Synergistic Fusion of AI and VR for Immersive Anatomy Learning and Practical Procedure Mastery Proceedings Article
In: Int. Conf. Syst., Comput., Autom. Netw., ICSCAN, Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 979-833151002-2.
Abstract | Links | BibTeX | Tags: 'current, Anatomy education, Anatomy educations, Computer interaction, Curricula, Embodied virtual assistant, Embodied virtual assistants, Generative AI, Human- Computer Interaction, Immersive, Intelligent virtual agents, Medical computing, Medical education, Medical procedure practice, Medical procedures, Medical training, Personnel training, Students, Teaching, Three dimensional computer graphics, Usability engineering, Virtual assistants, Virtual environments, Virtual Reality, Visualization
@inproceedings{harinee_elevating_2024,
title = {Elevating Medical Training: A Synergistic Fusion of AI and VR for Immersive Anatomy Learning and Practical Procedure Mastery},
author = {S. Harinee and R. Vimal Raja and E. Mugila and I. Govindharaj and V. Sanjaykumar and T. Ragavendhiran},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105000334626&doi=10.1109%2fICSCAN62807.2024.10894451&partnerID=40&md5=100899b489c00335e0a652f2efd33e23},
doi = {10.1109/ICSCAN62807.2024.10894451},
isbn = {979-833151002-2 (ISBN)},
year = {2024},
date = {2024-01-01},
booktitle = {Int. Conf. Syst., Comput., Autom. Netw., ICSCAN},
publisher = {Institute of Electrical and Electronics Engineers Inc.},
abstract = {Virtual reality with its 3D visualization has brought an overwhelming change to medical education, especially for courses like human anatomy. The proposed virtual reality system brings substantial improvements to the education received by medical students studying for their degree courses. The project puts forward a text-to-speech and speech-to-text aligned system that simplifies the use of a chatbot empowered by OpenAI GPT-4 and allows pupils to speak vocally with Avatar, the system's virtual assistant. In contrast to current methodologies, the virtual reality setup is powered by avatars and thus provides an enhanced virtual assistant environment. The avatars offer students repeated practice of medical procedures, the real uniqueness of the proposed product. The developed virtual reality environment improves on other current training techniques by letting a student interact with and become immersed in three-dimensional human organs, visualized in three dimensions, and hence gain deeper knowledge of the subject. A virtual assistant guides the whole process, giving insights and support to help the student bridge the gap from theory to practice. The system essentially follows a knowledge-based and analysis-based approach. The combination of generative AI with embodied virtual agents has great potential for customized virtual conversation assistants across a much wider range of applications. The study brings out the value of acquiring hands-on skills through simulated medical procedures and opens new frontiers of research and development in AI, VR, and medical education. In addition to assessing the effectiveness of these novel functionalities, the study also explores user-experience dimensions such as usability, task load, and the sense of presence in the proposed virtual medical environment. © 2024 IEEE.},
keywords = {'current, Anatomy education, Anatomy educations, Computer interaction, Curricula, Embodied virtual assistant, Embodied virtual assistants, Generative AI, Human- Computer Interaction, Immersive, Intelligent virtual agents, Medical computing, Medical education, Medical procedure practice, Medical procedures, Medical training, Personnel training, Students, Teaching, Three dimensional computer graphics, Usability engineering, Virtual assistants, Virtual environments, Virtual Reality, Visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
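The described speech-aligned loop (speech-to-text, GPT-4, text-to-speech) could be wired up roughly as below; the OpenAI audio models named here are assumptions, since the paper only specifies GPT-4:

from openai import OpenAI

client = OpenAI()

def avatar_turn(wav_path: str) -> bytes:
    # 1. Transcribe the student's spoken question.
    with open(wav_path, "rb") as f:
        text = client.audio.transcriptions.create(model="whisper-1", file=f).text
    # 2. Ask GPT-4 to answer as the guiding virtual assistant.
    answer = client.chat.completions.create(
        model="gpt-4",
        messages=[
            {"role": "system",
             "content": "You are Avatar, a virtual assistant guiding anatomy practice."},
            {"role": "user", "content": text},
        ],
    ).choices[0].message.content
    # 3. Synthesize the avatar's spoken reply.
    speech = client.audio.speech.create(model="tts-1", voice="alloy", input=answer)
    return speech.read()  # raw audio bytes for playback in the VR client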
Samson, J.; Lameras, P.; Taylor, N.; Kneafsey, R.
Fostering a Co-creation Process for the Development of an Extended Reality Healthcare Education Resource Proceedings Article
In: Auer, M.E.; Tsiatsos, T. (Ed.): Lect. Notes Networks Syst., pp. 205–212, Springer Science and Business Media Deutschland GmbH, 2024, ISSN: 23673370; ISBN: 978-303156074-3.
Abstract | Links | BibTeX | Tags: Artificial intelligence, Co-creation, Creation process, Diagnosis, Education computing, Education resource, Extended reality, Health care education, Hospitals, Immersive, Inter professionals, Interprofessional Healthcare Education, Software products, Students, Virtual patients
@inproceedings{samson_fostering_2024,
title = {Fostering a Co-creation Process for the Development of an Extended Reality Healthcare Education Resource},
author = {J. Samson and P. Lameras and N. Taylor and R. Kneafsey},
editor = {Auer M.E. and Tsiatsos T.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85189759614&doi=10.1007%2f978-3-031-56075-0_20&partnerID=40&md5=6ae832882a2e224094c1beb81c925333},
doi = {10.1007/978-3-031-56075-0_20},
isbn = {23673370 (ISSN); 978-303156074-3 (ISBN)},
year = {2024},
date = {2024-01-01},
booktitle = {Lect. Notes Networks Syst.},
volume = {937 LNNS},
pages = {205–212},
publisher = {Springer Science and Business Media Deutschland GmbH},
abstract = {The aim of this research is to create an immersive healthcare education resource using an extended reality (XR) platform. This platform leverages an existing software product, incorporating virtual patients with conversational capabilities driven by artificial intelligence (AI). The initial stage produced an early prototype focused on assessing an elderly virtual patient experiencing frailty. This scenario spans hospital admission through post-discharge care at home, involving various healthcare professionals such as paramedics, emergency clinicians, diagnostic radiographers, geriatricians, physiotherapists, occupational therapists, nurses, operating department practitioners, dietitians, and social workers. The plan moving forward is to refine and expand this prototype through co-creation with diverse stakeholders. The refinement process will include the introduction of updated scripts into the standard AI model. Furthermore, these scripts will be tested against a new hybrid model that incorporates generative AI. Ultimately, this resource will be co-designed to create a learning activity tailored for occupational therapy and physiotherapy students. This activity will undergo testing with a cohort of students, and the outcomes of this research are expected to inform the future development of interprofessional virtual simulated placements (VSPs). These placements will complement traditional clinical learning experiences, offering students an immersive environment to enhance their skills and knowledge in the healthcare field. © The Author(s), under exclusive license to Springer Nature Switzerland AG 2024.},
keywords = {Artificial intelligence, Co-creation, Creation process, Diagnosis, Education computing, Education resource, Extended reality, Health care education, Hospitals, Immersive, Inter professionals, Interprofessional Healthcare Education, Software products, Students, Virtual patients},
pubstate = {published},
tppubtype = {inproceedings}
}
Liu, Z.; Zhu, Z.; Zhu, L.; Jiang, E.; Hu, X.; Peppler, K.; Ramani, K.
ClassMeta: Designing Interactive Virtual Classmate to Promote VR Classroom Participation Proceedings Article
In: Conf Hum Fact Comput Syst Proc, Association for Computing Machinery, 2024, ISBN: 979-840070330-0.
Abstract | Links | BibTeX | Tags: 3D Avatars, Behavioral Research, Classroom learning, Collaborative learning, Computational Linguistics, Condition, E-Learning, Human behaviors, Language Model, Large language model, Learning experiences, Learning systems, pedagogical agent, Pedagogical agents, Students, Three dimensional computer graphics, Virtual Reality, VR classroom
@inproceedings{liu_classmeta_2024,
title = {ClassMeta: Designing Interactive Virtual Classmate to Promote VR Classroom Participation},
author = {Z. Liu and Z. Zhu and L. Zhu and E. Jiang and X. Hu and K. Peppler and K. Ramani},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85194868458&doi=10.1145%2f3613904.3642947&partnerID=40&md5=0592b2f977a2ad2e6366c6fa05808a6a},
doi = {10.1145/3613904.3642947},
isbn = {979-840070330-0 (ISBN)},
year = {2024},
date = {2024-01-01},
booktitle = {Conf Hum Fact Comput Syst Proc},
publisher = {Association for Computing Machinery},
abstract = {Peer influence plays a crucial role in promoting classroom participation, where behaviors from active students can contribute to a collective classroom learning experience. However, the presence of these active students depends on several conditions and is not consistently available across all circumstances. Recently, Large Language Models (LLMs) such as GPT have demonstrated the ability to simulate diverse human behaviors convincingly due to their capacity to generate contextually coherent responses based on their role settings. Inspired by this advancement in technology, we designed ClassMeta, a GPT-4 powered agent to help promote classroom participation by playing the role of an active student. These agents, which are embodied as 3D avatars in virtual reality, interact with actual instructors and students with both spoken language and body gestures. We conducted a comparative study to investigate the potential of ClassMeta for improving the overall learning experience of the class. © 2024 Copyright held by the owner/author(s)},
keywords = {3D Avatars, Behavioral Research, Classroom learning, Collaborative learning, Computational Linguistics, Condition, E-Learning, Human behaviors, Language Model, Large language model, Learning experiences, Learning systems, pedagogical agent, Pedagogical agents, Students, Three dimensional computer graphics, Virtual Reality, VR classroom},
pubstate = {published},
tppubtype = {inproceedings}
}
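The core of a ClassMeta-style agent is a role prompt that makes GPT-4 behave as an active student and emit both speech and a gesture for the 3D avatar to perform; a minimal sketch, with the gesture vocabulary and output format as assumptions:

from openai import OpenAI

client = OpenAI()

SYSTEM = (
    "You are an enthusiastic student in a VR classroom. Answer the teacher's "
    "questions, ask follow-up questions, and encourage your peers. Reply as "
    "JSON with keys 'speech' and 'gesture', where gesture is one of: "
    "raise_hand, nod, point."
)

turn = client.chat.completions.create(
    model="gpt-4",  # the model family named in the paper
    messages=[
        {"role": "system", "content": SYSTEM},
        {"role": "user", "content": "Teacher: Who can summarize today's reading?"},
    ],
)
print(turn.choices[0].message.content)  # parsed by the VR client to animate the avatar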
Liu, M.; M'Hiri, F.
Beyond Traditional Teaching: Large Language Models as Simulated Teaching Assistants in Computer Science Proceedings Article
In: SIGCSE - Proc. ACM Tech. Symp. Comput. Sci. Educ., pp. 743–749, Association for Computing Machinery, Inc, 2024, ISBN: 979-840070423-9.
Abstract | Links | BibTeX | Tags: Adaptive teaching, ChatGPT, Computational Linguistics, CS education, E-Learning, Education computing, Engineering education, GPT, Language Model, LLM, machine learning, Machine-learning, Novice programmer, novice programmers, Openai, Programming, Python, Students, Teaching, Virtual Reality
@inproceedings{liu_beyond_2024,
title = {Beyond Traditional Teaching: Large Language Models as Simulated Teaching Assistants in Computer Science},
author = {M. Liu and F. M'Hiri},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85189289344&doi=10.1145%2f3626252.3630789&partnerID=40&md5=44ec79c8f005f4551c820c61f5b5d435},
doi = {10.1145/3626252.3630789},
isbn = {979-840070423-9 (ISBN)},
year = {2024},
date = {2024-01-01},
booktitle = {SIGCSE - Proc. ACM Tech. Symp. Comput. Sci. Educ.},
volume = {1},
pages = {743–749},
publisher = {Association for Computing Machinery, Inc},
abstract = {As the prominence of Large Language Models (LLMs) grows in various sectors, their potential in education warrants exploration. In this study, we investigate the feasibility of employing GPT-3.5 from OpenAI, as an LLM teaching assistant (TA) or a virtual TA in computer science (CS) courses. The objective is to enhance the accessibility of CS education while maintaining academic integrity by refraining from providing direct solutions to current-semester assignments. Targeting Foundations of Programming (COMP202), an undergraduate course that introduces students to programming with Python, we have developed a virtual TA using the LangChain framework, known for integrating language models with diverse data sources and environments. The virtual TA assists students with their code and clarifies complex concepts. For homework questions, it is designed to guide students with hints rather than giving out direct solutions. We assessed its performance first through a qualitative evaluation, then a survey-based comparative analysis, using a mix of questions commonly asked on the COMP202 discussion board and questions created by the authors. Our preliminary results indicate that the virtual TA outperforms human TAs on clarity and engagement, matching them on accuracy when the question is non-assignment-specific, for which human TAs still proved more reliable. These findings suggest that while virtual TAs, leveraging the capabilities of LLMs, hold great promise towards making CS education experience more accessible and engaging, their optimal use necessitates human supervision. We conclude by identifying several directions that could be explored in future implementations. © 2024 ACM.},
keywords = {Adaptive teaching, ChatGPT, Computational Linguistics, CS education, E-Learning, Education computing, Engineering education, GPT, Language Model, LLM, machine learning, Machine-learning, Novice programmer, novice programmers, Openai, Programming, Python, Students, Teaching, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
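The paper's LangChain-based virtual TA can be approximated in a few lines; a minimal sketch in which the system prompt enforces hints-not-solutions (the prompt wording is an assumption; the study used GPT-3.5 for COMP202):

from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages([
    ("system",
     "You are a teaching assistant for COMP202 (introductory Python). "
     "Explain concepts and help debug code, but for graded assignment "
     "questions give hints only, never complete solutions."),
    ("human", "{question}"),
])

llm = ChatOpenAI(model="gpt-3.5-turbo")
chain = prompt | llm  # LCEL pipeline: template -> model

print(chain.invoke({"question": "My for loop never stops. What should I check?"}).content)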
Jia, Y.; Sin, Z. P. T.; Wang, X. E.; Li, C.; Ng, P. H. F.; Huang, X.; Dong, J.; Wang, Y.; Baciu, G.; Cao, J.; Li, Q.
NivTA: Towards a Naturally Interactable Edu-Metaverse Teaching Assistant for CAVE Proceedings Article
In: Proc. - IEEE Int. Conf. Metaverse Comput., Netw., Appl., MetaCom, pp. 57–64, Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 979-833151599-7.
Abstract | Links | BibTeX | Tags: Active learning, Adversarial machine learning, cave automatic virtual environment, Cave automatic virtual environments, Caves, Chatbots, Contrastive Learning, Digital elevation model, Federated learning, Interactive education, Language Model, Large language model agent, Learning Activity, LLM agents, Metaverses, Model agents, Natural user interface, Students, Teaching, Teaching assistants, Virtual environments, Virtual Reality, virtual teaching assistant, Virtual teaching assistants
@inproceedings{jia_nivta_2024,
title = {NivTA: Towards a Naturally Interactable Edu-Metaverse Teaching Assistant for CAVE},
author = {Y. Jia and Z. P. T. Sin and X. E. Wang and C. Li and P. H. F. Ng and X. Huang and J. Dong and Y. Wang and G. Baciu and J. Cao and Q. Li},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85211447638&doi=10.1109%2fMetaCom62920.2024.00023&partnerID=40&md5=efefd453c426e74705518254bdc49e87},
doi = {10.1109/MetaCom62920.2024.00023},
isbn = {979-833151599-7 (ISBN)},
year = {2024},
date = {2024-01-01},
booktitle = {Proc. - IEEE Int. Conf. Metaverse Comput., Netw., Appl., MetaCom},
pages = {57–64},
publisher = {Institute of Electrical and Electronics Engineers Inc.},
abstract = {Edu-metaverse is a specialized metaverse dedicated to interactive education in an immersive environment. Its main purpose is to immerse learners in a digital environment and conduct learning activities that mirror reality. Not only does it enable activities that may be difficult to perform in the real world, but it also extends the interaction to personalized and collaborative learning (CL). This is a more effective pedagogical approach, as it tends to enhance the motivation and engagement of students and increases their active participation in lessons. To this end, we propose to realize an interactive virtual teaching assistant called NivTA. To make NivTA easily accessible and engaging for multiple users simultaneously, we also propose to use a CAVE virtual environment (CAVE-VR) as a "metaverse window" into concepts, ideas, topics, and learning activities. The students simply need to step into the CAVE-VR and interact with a life-size teaching assistant that they can engage with naturally, as if they were approaching a real person. Instead of the text-based interaction currently developed for large language models (LLMs), NivTA is given additional cues regarding the users so it can react more naturally via a specific prompt design. For example, the user can simply point to an educational concept and ask NivTA to explain what it is. To guide NivTA onto the educational concept, the prompt is also designed to feed in an educational knowledge graph (KG) that provides NivTA with the context of the student's question. The NivTA system is an integration of several components that are discussed in this paper. We further describe how the system is designed and implemented, along with potential applications and future work on interactive collaborative edu-metaverse environments dedicated to teaching and learning. © 2024 IEEE.},
keywords = {Active learning, Adversarial machine learning, cave automatic virtual environment, Cave automatic virtual environments, Caves, Chatbots, Contrastive Learning, Digital elevation model, Federated learning, Interactive education, Language Model, Large language model agent, Learning Activity, LLM agents, Metaverses, Model agents, Natural user interface, Students, Teaching, Teaching assistants, Virtual environments, Virtual Reality, virtual teaching assistant, Virtual teaching assistants},
pubstate = {published},
tppubtype = {inproceedings}
}
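The described prompt design (pointing cue plus knowledge-graph context) could be assembled roughly as follows; the KG fragment, model name, and prompt wording are assumptions, not the NivTA implementation:

from openai import OpenAI

client = OpenAI()

knowledge_graph = {  # hypothetical KG fragment: concept -> related facts
    "eigenvalue": ["is-a: scalar", "satisfies: A v = lambda v",
                   "prerequisite: matrix multiplication"],
}

def nivta_reply(pointed_concept: str, question: str) -> str:
    facts = "; ".join(knowledge_graph.get(pointed_concept, []))
    return client.chat.completions.create(
        model="gpt-4o-mini",  # hypothetical; the paper does not name a model
        messages=[
            {"role": "system",
             "content": ("You are NivTA, a life-size teaching assistant in a CAVE. "
                         f"The student is pointing at '{pointed_concept}'. "
                         f"Known facts: {facts}. Explain conversationally.")},
            {"role": "user", "content": question},
        ],
    ).choices[0].message.content

print(nivta_reply("eigenvalue", "What is this?"))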
Kapadia, N.; Gokhale, S.; Nepomuceno, A.; Cheng, W.; Bothwell, S.; Mathews, M.; Shallat, J. S.; Schultz, C.; Gupta, A.
Evaluation of Large Language Model Generated Dialogues for an AI Based VR Nurse Training Simulator Proceedings Article
In: Chen, J.Y.C.; Fragomeni, G. (Ed.): Lect. Notes Comput. Sci., pp. 200–212, Springer Science and Business Media Deutschland GmbH, 2024, ISSN: 03029743; ISBN: 978-303161040-0.
Abstract | Links | BibTeX | Tags: Bard, ChatGPT, ClaudeAI, Clinical research, Computational Linguistics, Dialogue Generation, Dialogue generations, Education computing, Extended reality, Health care education, Healthcare Education, Language Model, Language processing, Large language model, large language models, Natural Language Processing, Natural language processing systems, Natural languages, Nurse Training Simulation, Nursing, Patient avatar, Patient Avatars, Semantics, Students, Training simulation, Virtual Reality
@inproceedings{kapadia_evaluation_2024,
title = {Evaluation of Large Language Model Generated Dialogues for an AI Based VR Nurse Training Simulator},
author = {N. Kapadia and S. Gokhale and A. Nepomuceno and W. Cheng and S. Bothwell and M. Mathews and J. S. Shallat and C. Schultz and A. Gupta},
editor = {Chen J.Y.C. and Fragomeni G.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85196200653&doi=10.1007%2f978-3-031-61041-7_13&partnerID=40&md5=8890a8d0c289fdf6e7ab82e105249097},
doi = {10.1007/978-3-031-61041-7_13},
isbn = {03029743 (ISSN); 978-303161040-0 (ISBN)},
year = {2024},
date = {2024-01-01},
booktitle = {Lect. Notes Comput. Sci.},
volume = {14706 LNCS},
pages = {200–212},
publisher = {Springer Science and Business Media Deutschland GmbH},
abstract = {This paper explores the efficacy of Large Language Models (LLMs) in generating dialogues for patient avatars in Virtual Reality (VR) nurse training simulators. With the integration of technology in healthcare education evolving rapidly, the potential of natural language processing (NLP) to enhance nurse training through realistic patient interactions presents a significant opportunity. Our study introduces a novel LLM-based dialogue generation system, leveraging models such as ChatGPT, Google Bard, and Claude AI. We detail the development of our script generation system, which was a collaborative endeavor involving nurses, technical artists, and developers. The system, tested on the Meta Quest 2 VR headset, integrates complex dialogues created through a synthesis of clinical expertise and advanced NLP, aimed at simulating real-world nursing scenarios. Through a comprehensive evaluation involving lexical and semantic similarity tests compared to clinical expert-generated scripts, we assess the potential of LLMs as suitable alternatives for script generation. The findings aim to contribute to the development of a more interactive and effective VR nurse training simulator, enhancing communication skills among nursing students for improved patient care outcomes. This research underscores the importance of advanced NLP applications in healthcare education, offering insights into the practicality and limitations of employing LLMs in clinical training environments. © The Author(s), under exclusive license to Springer Nature Switzerland AG 2024.},
keywords = {Bard, ChatGPT, ClaudeAI, Clinical research, Computational Linguistics, Dialogue Generation, Dialogue generations, Education computing, Extended reality, Health care education, Healthcare Education, Language Model, Language processing, Large language model, large language models, Natural Language Processing, Natural language processing systems, Natural languages, Nurse Training Simulation, Nursing, Patient avatar, Patient Avatars, Semantics, Students, Training simulation, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
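The semantic-similarity test could be run with sentence embeddings; a sketch using sentence-transformers, an assumed tool choice since the paper does not name its similarity implementation, with invented example lines:

from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed embedding model

generated = "My chest has been hurting since this morning, and I feel dizzy."
reference = "I've had chest pain since early today and I'm light-headed."

emb = model.encode([generated, reference])
print(float(util.cos_sim(emb[0], emb[1])))  # closer to 1.0 = more similar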
Sikström, P.; Valentini, C.; Sivunen, A.; Kärkkäinen, T.
Pedagogical agents communicating and scaffolding students' learning: High school teachers' and students' perspectives Journal Article
In: Computers and Education, vol. 222, 2024, ISSN: 03601315.
Abstract | Links | BibTeX | Tags: Adversarial machine learning, Agents communication, Augmented Reality, Contrastive Learning, Federated learning, Human communications, Human-Machine Communication, Human-to-human communication script, Human–machine communication, Human–machine communication (HMC), pedagogical agent, Pedagogical agents, Scaffolds, Scaffolds (biology), Secondary education, Student learning, Students, Teachers', Teaching, User-centered design, User-centred, Virtual environments
@article{sikstrom_pedagogical_2024,
title = {Pedagogical agents communicating and scaffolding students' learning: High school teachers' and students' perspectives},
author = {P. Sikström and C. Valentini and A. Sivunen and T. Kärkkäinen},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85202198552&doi=10.1016%2fj.compedu.2024.105140&partnerID=40&md5=dfb4a7b6c1f6352c5cc6faac213e938f},
doi = {10.1016/j.compedu.2024.105140},
issn = {03601315 (ISSN)},
year = {2024},
date = {2024-01-01},
journal = {Computers and Education},
volume = {222},
abstract = {Pedagogical agents (PAs) communicate verbally and non-verbally with students in digital and virtual reality/augmented reality learning environments. PAs have been shown to be beneficial for learning, and generative artificial intelligence, such as large language models, can improve PAs' communication abilities significantly. K-12 education is underrepresented in learning technology research and teachers' and students' insights have not been considered when developing PA communication. The current study addresses this research gap by conducting and analyzing semi-structured, in-depth interviews with eleven high school teachers and sixteen high school students about their expectations for PAs' communication capabilities. The interviewees identified relational and task-related communication capabilities that a PA should perform to communicate effectively with students and scaffold their learning. PA communication that is simultaneously affirmative and relational can induce immediacy, foster the relationship and engagement with a PA, and support students' learning management. Additionally, the teachers and students described the activities and technological aspects that should be considered when designing conversational PAs. The study showed that teachers and students applied human-to-human communication scripts when outlining their desired PA communication characteristics. The study offers novel insights and recommendations to researchers and developers on the communicational, pedagogical, and technological aspects that must be considered when designing communicative PAs that scaffold students’ learning, and discusses the contributions on human–machine communication in education. © 2024 The Authors},
keywords = {Adversarial machine learning, Agents communication, Augmented Reality, Contrastive Learning, Federated learning, Human communications, Human-Machine Communication, Human-to-human communication script, Human–machine communication, Human–machine communication (HMC), pedagogical agent, Pedagogical agents, Scaffolds, Scaffolds (biology), Secondary education, Student learning, Students, Teachers', Teaching, User-centered design, User-centred, Virtual environments},
pubstate = {published},
tppubtype = {article}
}
Seo, W. J.; Kim, M.
Utilization of Generative Artificial Intelligence in Nursing Education: A Topic Modeling Analysis Journal Article
In: Education Sciences, vol. 14, no. 11, 2024, ISSN: 22277102.
Abstract | Links | BibTeX | Tags: generative artificial intelligence, Nursing, nursing education, nursing education research, patients, Students, topic modeling
@article{seo_utilization_2024,
title = {Utilization of Generative Artificial Intelligence in Nursing Education: A Topic Modeling Analysis},
author = {W. J. Seo and M. Kim},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85210378858&doi=10.3390%2feducsci14111234&partnerID=40&md5=417127fbeb94cc40d893efe11a149ad3},
doi = {10.3390/educsci14111234},
issn = {22277102 (ISSN)},
year = {2024},
date = {2024-01-01},
journal = {Education Sciences},
volume = {14},
number = {11},
abstract = {The advent of artificial intelligence (AI) has prompted the introduction of novel digital technologies, including mobile learning and metaverse learning, into nursing students’ learning environments. This study used text network and topic modeling analyses to identify the research trends in generative AI in nursing education for students and patients in schools, hospitals, and community settings. Additionally, an ego network analysis using strengths, weaknesses, opportunities, and threats (SWOT) words was performed to develop a comprehensive understanding of factors that impact the integration of generative AI in nursing education. The literature was searched across five databases for publications up to July 2024. After excluding studies whose abstracts were not available and removing duplicates, 139 articles were identified. The seven derived topics were labeled as usability in future scientific applications, application and integration of technology, simulation education, utility in image and text analysis, performance in exams, utility in assignments, and patient education. The ego network analysis focusing on the SWOT keywords revealed that “healthcare”, “use”, and “risk” were common keywords. The limited emphasis on “threats”, “strengths”, and “weaknesses” compared to “opportunities” in the SWOT analysis indicated that these areas are relatively underexplored in nursing education. To integrate generative AI technology into education, such as simulation training, teaching activities, and the development of personalized learning, it is necessary to identify the relevant internal strengths and weaknesses of the schools, hospitals, and communities that apply it, and to plan practical application strategies aligned with clear institutional guidelines. © 2024 by the authors.},
keywords = {generative artificial intelligence, Nursing, nursing education, nursing education research, patients, Students, topic modeling},
pubstate = {published},
tppubtype = {article}
}
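The topic-modeling step can be illustrated with LDA over abstract texts; the method choice is an assumption for illustration, and the toy corpus is invented (the paper derives seven topics from 139 articles):

from sklearn.feature_extraction.text import CountVectorizer
from sklearn.decomposition import LatentDirichletAllocation

abstracts = [  # invented toy corpus
    "generative AI chatbot supports nursing simulation education",
    "students used ChatGPT to draft care plans for virtual patients",
    "exam performance of large language models in nursing licensure tests",
]

vec = CountVectorizer(stop_words="english").fit(abstracts)
dtm = vec.transform(abstracts)
lda = LatentDirichletAllocation(n_components=2, random_state=0).fit(dtm)

terms = vec.get_feature_names_out()
for k, comp in enumerate(lda.components_):
    top = [terms[i] for i in comp.argsort()[-4:][::-1]]
    print(f"topic {k}: {top}")  # top terms characterizing each derived topic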
Chan, A.; Liu, J. A.
Board 24: Development of Multi-User-enabled, Interactive, and Responsive Virtual/Augmented Reality-based Laboratory Training System Proceedings Article
In: ASEE Annu. Conf. Expos. Conf. Proc., American Society for Engineering Education, 2024, ISSN: 21535965.
Abstract | Links | BibTeX | Tags: 'current, Augmented Reality, Chemical engineering students, Concentration (process), Cooling towers, Degassing, Hands-on learning, Hydrodesulfurization, Immersive, Large groups, Liquid crystal displays, Multiusers, Nanoreactors, Personnel training, Pilot-scale equipment, Protective equipment, Students, Training Systems, Unit Operations Laboratory
@inproceedings{chan_board_2024,
title = {Board 24: Development of Multi-User-enabled, Interactive, and Responsive Virtual/Augmented Reality-based Laboratory Training System},
author = {A. Chan and J. A. Liu},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85202042560&partnerID=40&md5=9b1565ea2dd4336b4cc45594fe4f7900},
isbn = {21535965 (ISSN)},
year = {2024},
date = {2024-01-01},
booktitle = {ASEE Annu. Conf. Expos. Conf. Proc.},
publisher = {American Society for Engineering Education},
abstract = {The Unit Operations Laboratory (UOL) is a place where third-year chemical engineering students can apply their engineering and science concepts on pilot-scale equipment. However, the physical lab is resource-intensive, requiring protective equipment and constant supervision. In addition, because there are limited units for large groups of students, students perform experiments according to a rolling program schedule, making alignment between lecture teaching and hands-on learning challenging. The research team focuses on increasing the accessibility of the UOL by using simulation gaming in standard, virtual-reality, and augmented-reality modalities as an educational tool. The "Virtual Unit Ops Lab" application places students in an immersive simulated environment of the physical lab, where they can gain practical experience without the difficulties of an in-person lab; specialized headsets and controllers allow students to move around and physically interact with various parts of the equipment. Developed with the Unity engine, the application serves as a digital twin of an existing lab, enabling an immersive simulation of the full-scale lab equipment, in addition to enhanced learning features such as displaying the current action performed by the user and providing visual/audio feedback for correct and incorrect actions. The application also supports use by multiple "players" (i.e., it has a "multiplayer" option), allowing multiple students to communicate and discuss their current step. As a work in progress, a non-player-character chatbot (with generative AI responses) is being developed for the existing applications using OpenAI's GPT-3.5, which provides designated information to a student in a conversational manner. Additionally, a supplemental "Augmented Unit Ops Lab" application uses augmented reality to superimpose three-dimensional flow diagrams onto the heat exchanger through the view of a phone camera during the in-person labs. © American Society for Engineering Education, 2024.},
keywords = {Augmented Reality, Chemical engineering students, Concentration (process), Cooling towers, Degassing, Hands-on learning, Hydrodesulfurization, Immersive, Large groups, Liquid crystal displays, Multiusers, Nanoreactors, Personnel training, Pilot-scale equipment, Protective equipment, Students, Training Systems, Unit Operations Laboratory},
pubstate = {published},
tppubtype = {inproceedings}
}
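Editor's note: the abstract above names OpenAI's GPT-3.5 as the backend for the non-player-character chatbot. A minimal sketch of the request/response shape follows, assuming the current OpenAI Python SDK; inside the Unity application the equivalent call would be issued from C#, and the system prompt, function names, and guardrails here are illustrative guesses rather than the authors' implementation.

# Hypothetical sketch of a GPT-3.5-backed NPC lab assistant that answers
# students' questions about a unit operation "in character".
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

SYSTEM_PROMPT = (
    "You are a lab assistant inside a virtual Unit Operations Laboratory. "
    "Answer questions about the heat exchanger procedure concisely, and "
    "never reveal answers to graded questions."
)

def npc_reply(student_question: str, history: list[dict]) -> str:
    """Return the NPC's next utterance given the running conversation."""
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    messages += history  # prior user/assistant turns, oldest first
    messages.append({"role": "user", "content": student_question})
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=messages,
        temperature=0.7,
    )
    return response.choices[0].message.content

history: list[dict] = []
print(npc_reply("Which valve do I open first on the cooling tower?", history))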
2023
Gaikwad, T.; Kulkarni, A.
Smart Training Framework and Assessment Strategies Proceedings Article
In: IEEE Eng. Informatics, EI, Institute of Electrical and Electronics Engineers Inc., 2023, ISBN: 979-835033852-2 (ISBN).
Abstract | Links | BibTeX | Tags: AR training, Assessment strategies, Augmented Reality, Augmented reality training, Computational Linguistics, Edtech, Education computing, Education sectors, Engineering education, Language Model, Large language model, large language models, Prompt engineering, Risk assessment, Smart assessment, Students, Training assessment, Training framework
@inproceedings{gaikwad_smart_2023,
title = {Smart Training Framework and Assessment Strategies},
author = {T. Gaikwad and A. Kulkarni},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85193969838&doi=10.1109%2fIEEECONF58110.2023.10520594&partnerID=40&md5=c23eba992e455b09829dd03d25fe567e},
doi = {10.1109/IEEECONF58110.2023.10520594},
isbn = {979-835033852-2 (ISBN)},
year = {2023},
date = {2023-01-01},
booktitle = {IEEE Eng. Informatics, EI},
publisher = {Institute of Electrical and Electronics Engineers Inc.},
abstract = {The rapidly evolving landscape of technological advancement is significantly transforming the education sector. This integration of technology into education has given rise to the edtech industry, which continues to transform as newer technologies are introduced. Training delivered to learners, along with the assessment of those learners, are the fundamental components of the education sector. However, current methods of delivering training and assessing learners face numerous challenges, including skill shortages caused by rapid technological advancement, high costs, and the difficulty of conducting complex training in high-risk environments. Similarly, assessment methods struggle with inflexible assessment strategies and limited personalized feedback to learners. Addressing these challenges in training and assessment, this study proposes a smart training and assessment framework (STAF) that leverages augmented reality (AR) and artificial intelligence (AI) based large language models (LLMs), which stand out as a monumental leap in reshaping the training and assessment sector. As part of this study, an AR-based training module was created and delivered to students, and a survey of these students was conducted to gain insights into the adaptability of AR-based training and the potential to improve it. It is concluded that, along with AR, AI and LLMs with prompt engineering strategies should be integrated into the education domain for better interactivity and enhanced student performance. Currently, limited research exists on the integration of LLMs into AR environments for the education sector, and this paper provides an in-depth exploration of the immense potential of LLM applications within the realm of training and assessment for improved learner performance. © 2023 IEEE.},
keywords = {AR training, Assessment strategies, Augmented Reality, Augmented reality training, Computational Linguistics, Edtech, Education computing, Education sectors, Engineering education, Language Model, Large language model, large language models, Prompt engineering, Risk assessment, Smart assessment, Students, Training assessment, Training framework},
pubstate = {published},
tppubtype = {inproceedings}
}
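Editor's note: the STAF paper argues for prompt engineering strategies in assessment. The sketch below shows one plausible pattern, assembling a rubric and a trainee's action log into a structured prompt for personalized feedback; the rubric, log format, and wording are invented for illustration, as the paper does not publish its prompts.

# Sketch of a rubric-driven prompt builder for LLM-based training assessment.
# All criteria and log entries are hypothetical examples.
RUBRIC = {
    "safety": "Did the trainee follow lockout/tagout before touching the unit?",
    "sequence": "Were the procedure steps executed in the prescribed order?",
    "timing": "Was each step completed within its allotted time window?",
}

def build_assessment_prompt(trainee_log: list[str]) -> str:
    """Assemble rubric criteria and the trainee's action log into one prompt."""
    criteria = "\n".join(f"- {name}: {q}" for name, q in RUBRIC.items())
    log = "\n".join(f"{i + 1}. {step}" for i, step in enumerate(trainee_log))
    return (
        "You are an AR training assessor.\n"
        f"Evaluate the trainee against these criteria:\n{criteria}\n\n"
        f"Trainee action log:\n{log}\n\n"
        "For each criterion return PASS/FAIL with one sentence of "
        "personalized feedback, then an overall score out of 10."
    )

print(build_assessment_prompt([
    "Applied lockout tag to pump breaker",
    "Opened inlet valve before pressure check",
]))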
Marquez, R.; Barrios, N.; Vera, R. E.; Mendez, M. E.; Tolosa, L.; Zambrano, F.; Li, Y.
A perspective on the synergistic potential of artificial intelligence and product-based learning strategies in biobased materials education Journal Article
In: Education for Chemical Engineers, vol. 44, pp. 164–180, 2023, ISSN: 17497728 (ISSN).
Abstract | Links | BibTeX | Tags: Artificial intelligence, Bio-based, Bio-based materials, Biobased, ChatGPT, Chemical engineering, Chemical engineering education, Education computing, Engineering education, Formulation, Generative AI, Learning strategy, Learning systems, Material engineering, Materials, Students, Sustainable development, Teaching approaches, Traditional materials, Virtual Reality
@article{marquez_perspective_2023,
title = {A perspective on the synergistic potential of artificial intelligence and product-based learning strategies in biobased materials education},
author = {R. Marquez and N. Barrios and R. E. Vera and M. E. Mendez and L. Tolosa and F. Zambrano and Y. Li},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85162078243&doi=10.1016%2fj.ece.2023.05.005&partnerID=40&md5=76cd274af795123f1e31e345dd36eded},
doi = {10.1016/j.ece.2023.05.005},
issn = {17497728 (ISSN)},
year = {2023},
date = {2023-01-01},
journal = {Education for Chemical Engineers},
volume = {44},
pages = {164–180},
abstract = {The integration of product-based learning strategies in Materials in Chemical Engineering education is crucial for students to gain the skills and competencies required to thrive in the emerging circular bioeconomy. Traditional materials engineering education has often relied on a transmission teaching approach, in which students are expected to passively receive information from instructors. However, this approach has proven inadequate under current circumstances, in which information is readily available and innovative tools such as artificial intelligence and virtual reality environments (e.g., the metaverse) are becoming widespread. Instead, we consider that a critical goal of education should be to develop the aptitudes and abilities that enable students to generate solutions and products that address societal demands. In this work, we propose innovative strategies, such as product-based learning methods and GPT (Generative Pre-trained Transformer) artificial intelligence text generation models, to shift the focus of a Materials in Chemical Engineering course from non-sustainable materials to sustainable ones, aiming to address the critical challenges of our society. This approach pursues two objectives: first, to enable students to actively engage with raw materials and solve real-world challenges, and second, to foster creativity and entrepreneurship skills by providing them with the necessary tools to conduct brainstorming sessions and develop procedures following scientific methods. The incorporation of circular bioeconomy concepts, such as renewable resources, waste reduction, and resource efficiency, into the curriculum provides a framework for students to understand the environmental, social, and economic implications of Chemical Engineering. It also allows them to make informed decisions within the circular bioeconomy framework, benefiting society by promoting the development and adoption of sustainable technologies and practices. © 2023 Institution of Chemical Engineers},
keywords = {Artificial intelligence, Bio-based, Bio-based materials, Biobased, ChatGPT, Chemical engineering, Chemical engineering education, Education computing, Engineering education, Formulation, Generative AI, Learning strategy, Learning systems, Material engineering, Materials, Students, Sustainable development, Teaching approaches, Traditional materials, Virtual Reality},
pubstate = {published},
tppubtype = {article}
}
Ayre, D.; Dougherty, C.; Zhao, Y.
IMPLEMENTATION OF AN ARTIFICIAL INTELLIGENCE (AI) INSTRUCTIONAL SUPPORT SYSTEM IN A VIRTUAL REALITY (VR) THERMAL-FLUIDS LABORATORY Proceedings Article
In: ASME Int Mech Eng Congress Expos Proc, American Society of Mechanical Engineers (ASME), 2023, ISBN: 978-079188765-3 (ISBN).
Abstract | Links | BibTeX | Tags: Artificial intelligence, E-Learning, Education computing, Engineering education, Fluid mechanics, Generative AI, generative artificial intelligence, GPT, High educations, Instructional support, Laboratories, Laboratory class, Laboratory experiments, Physical laboratory, Professional aspects, Students, Support systems, Thermal fluids, Virtual Reality, Virtual-reality environment
@inproceedings{ayre_implementation_2023,
title = {IMPLEMENTATION OF AN ARTIFICIAL INTELLIGENCE (AI) INSTRUCTIONAL SUPPORT SYSTEM IN A VIRTUAL REALITY (VR) THERMAL-FLUIDS LABORATORY},
author = {D. Ayre and C. Dougherty and Y. Zhao},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85185393784&doi=10.1115%2fIMECE2023-112683&partnerID=40&md5=c2492592a016478a4b3591ff82a93be5},
doi = {10.1115/IMECE2023-112683},
isbn = {978-079188765-3 (ISBN)},
year = {2023},
date = {2023-01-01},
booktitle = {ASME Int Mech Eng Congress Expos Proc},
volume = {8},
publisher = {American Society of Mechanical Engineers (ASME)},
abstract = {Physical laboratory experiments have long been a cornerstone of higher education, providing future engineers practical, real-life experience invaluable to their careers. However, demand for laboratory time has exceeded physical capacity. Virtual reality (VR) labs have proven to retain many of the benefits of attending physical labs while also providing significant advantages only available in a VR environment. Previously, our group developed a pilot VR lab, built with the Unity game engine, that replicates six (6) unique thermal-fluids lab experiments. One of the VR labs was tested in a thermal-fluid mechanics laboratory class with favorable results, but students highlighted the need for additional assistance within the VR simulation. In response to this testing, we have incorporated an artificial intelligence (AI) assistant to aid students within the VR environment by developing an interaction model. Utilizing the Generative Pre-trained Transformer 4 (GPT-4) large language model (LLM) and augmented context retrieval, the AI assistant can provide reliable instruction and troubleshoot errors while students conduct the lab procedure, offering an experience similar to that of a real-life lab assistant. The updated VR lab was tested in two laboratory classes; while the overall tone of student responses to an AI-powered assistant was excitement and enthusiasm, observations and other recorded data show that students are currently unsure how to utilize this new technology, which will help guide future refinement of AI components within the VR environment. © 2023 by ASME.},
keywords = {Artificial intelligence, E-Learning, Education computing, Engineering education, Fluid mechanics, Generative AI, generative artificial intelligence, GPT, High educations, Instructional support, Laboratories, Laboratory class, Laboratory experiments, Physical laboratory, Professional aspects, Students, Support systems, Thermal fluids, Virtual Reality, Virtual-reality environment},
pubstate = {published},
tppubtype = {inproceedings}
}
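Editor's note: "augmented context retrieval" as described above is a retrieval-augmented-generation pattern. The sketch below grounds a GPT-4 answer in the most similar lab-manual chunk, using TF-IDF retrieval for simplicity; the chunking, retrieval method, manual text, and prompt wording are all assumptions rather than the authors' implementation.

# Minimal retrieval-augmented sketch: fetch the most relevant lab-manual
# passage for a student's question and hand it to GPT-4 as grounding context.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from openai import OpenAI

MANUAL_CHUNKS = [
    "Step 3: open the bypass valve before starting the centrifugal pump.",
    "The rotameter reads volumetric flow; record it once it stabilizes.",
    # ... one string per lab-manual section (hypothetical content)
]

vectorizer = TfidfVectorizer().fit(MANUAL_CHUNKS)
chunk_vectors = vectorizer.transform(MANUAL_CHUNKS)
client = OpenAI()  # reads OPENAI_API_KEY from the environment

def assistant_answer(question: str) -> str:
    """Answer a student's question grounded in the closest manual chunk."""
    sims = cosine_similarity(vectorizer.transform([question]), chunk_vectors)
    context = MANUAL_CHUNKS[sims.argmax()]
    response = client.chat.completions.create(
        model="gpt-4",
        messages=[
            {"role": "system",
             "content": "You are a VR lab assistant. Answer only from the "
                        f"provided manual excerpt.\nExcerpt: {context}"},
            {"role": "user", "content": question},
        ],
    )
    return response.choices[0].message.content

print(assistant_answer("Why is my flow reading jumping around?"))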