AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2025
Kim, Y.; Aamir, Z.; Singh, M.; Boorboor, S.; Mueller, K.; Kaufman, A. E.
Explainable XR: Understanding User Behaviors of XR Environments Using LLM-Assisted Analytics Framework Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 31, no. 5, pp. 2756–2766, 2025, ISSN: 1077-2626, (Publisher: IEEE Computer Society).
Abstract | Links | BibTeX | Tags: adult, Agnostic, Article, Assistive, Cross Reality, Data Analytics, Data collection, data interpretation, Data recording, Data visualization, Extended reality, human, Language Model, Large language model, large language models, Multi-modal, Multimodal Data Collection, normal human, Personalized assistive technique, Personalized Assistive Techniques, recorder, Spatio-temporal data, therapy, user behavior, User behaviors, Virtual addresses, Virtual environments, Virtual Reality, Visual analytics, Visual languages
@article{kim_explainable_2025,
  title     = {Explainable {XR}: Understanding User Behaviors of {XR} Environments Using {LLM}-Assisted Analytics Framework},
  author    = {Kim, Y. and Aamir, Z. and Singh, M. and Boorboor, S. and Mueller, K. and Kaufman, A. E.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105003815583&doi=10.1109%2FTVCG.2025.3549537&partnerID=40&md5=bc5ac38eb19faa224282cf385f43799f},
  doi       = {10.1109/TVCG.2025.3549537},
  issn      = {1077-2626},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {IEEE Transactions on Visualization and Computer Graphics},
  volume    = {31},
  number    = {5},
  pages     = {2756--2766},
  abstract  = {We present Explainable XR, an end-to-end framework for analyzing user behavior in diverse eXtended Reality (XR) environments by leveraging Large Language Models (LLMs) for data interpretation assistance. Existing XR user analytics frameworks face challenges in handling cross-virtuality - AR, VR, MR - transitions, multi-user collaborative application scenarios, and the complexity of multimodal data. Explainable XR addresses these challenges by providing a virtuality-agnostic solution for the collection, analysis, and visualization of immersive sessions. We propose three main components in our framework: (1) A novel user data recording schema, called User Action Descriptor (UAD), that can capture the users' multimodal actions, along with their intents and the contexts; (2) a platform-agnostic XR session recorder, and (3) a visual analytics interface that offers LLM-assisted insights tailored to the analysts' perspectives, facilitating the exploration and analysis of the recorded XR session data. We demonstrate the versatility of Explainable XR by demonstrating five use-case scenarios, in both individual and collaborative XR applications across virtualities. Our technical evaluation and user studies show that Explainable XR provides a highly usable analytics solution for understanding user actions and delivering multifaceted, actionable insights into user behaviors in immersive environments.},
  note      = {Publisher: IEEE Computer Society},
  keywords  = {adult, Agnostic, Article, Assistive, Cross Reality, Data Analytics, Data collection, data interpretation, Data recording, Data visualization, Extended reality, human, Language Model, Large language model, large language models, Multi-modal, Multimodal Data Collection, normal human, Personalized assistive technique, Personalized Assistive Techniques, recorder, Spatio-temporal data, therapy, user behavior, User behaviors, Virtual addresses, Virtual environments, Virtual Reality, Visual analytics, Visual languages},
  pubstate  = {published},
  tppubtype = {article}
}
Wang, R. -G.; Tsai, C. H.; Tseng, M. C.; Hong, R. -C.; Syu, H.; Chou, C. -C.
Immersive Smart Meter Data Analytics: Leveraging eXtended Reality with LSTM and LLMs Proceedings Article
In: pp. 32–36, International Workshop on Computer Science and Engineering (WCSE), 2025.
Abstract | Links | BibTeX | Tags: Data Analytics, Data visualization, Decision making, Energy management, Energy-consumption, Exponential growth, Extended reality (XR), Forecasting, Human computer interaction, Immersive, Language Model, Large language model, large language models (LLMs), Long short-term memory, Long Short-Term Memory (LSTM), Short term memory, Smart Grid technologies, Smart Meters, Smart power grids, Visual analytics
@inproceedings{wang_immersive_2025,
  title     = {Immersive Smart Meter Data Analytics: Leveraging {eXtended} Reality with {LSTM} and {LLMs}},
  author    = {Wang, R.-G. and Tsai, C. H. and Tseng, M. C. and Hong, R.-C. and Syu, H. and Chou, C.-C.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105017965008&doi=10.18178%2Fwcse.2025.06.006&partnerID=40&md5=866ab1ca8cdf0372c020f0131f1d68c1},
  doi       = {10.18178/wcse.2025.06.006},
  year      = {2025},
  date      = {2025-01-01},
  booktitle = {Proceedings of the International Workshop on Computer Science and Engineering ({WCSE})},
  pages     = {32--36},
  publisher = {International Workshop on Computer Science and Engineering (WCSE)},
  abstract  = {The rapid advancement of smart grid technologies has led to an exponential growth in smart meter data, creating new opportunities for more accurate energy consumption forecasting and immersive data visualization. This study proposes an integrated framework that combines eXtended Reality (XR), Long Short-Term Memory (LSTM) networks, and Large Language Models (LLMs) to enhance smart meter data analytics. The process begins with the application of LSTM to capture temporal dependencies in historical electricity usage data. Subsequently, the Large Language Models (LLMs) are employed to refine these textual forecasts, offering better predictions and explanations that are easily understandable by end-users. Finally, the enriched insights are presented through an XR environment, enabling users to interact with smart meter analytics in an immersive and intuitive way. By visualizing data trends, predictions, and explanatory narratives in a spatial computing interface, users can explore complex information more effectively. This multi-modal approach facilitates better decision-making for energy management, promotes user engagement, and supports smart city initiatives aiming for sustainable energy consumption. The integration of XR, LSTM, and LLMs technologies demonstrates a promising direction for future research and practical applications in smart energy systems.},
  keywords  = {Data Analytics, Data visualization, Decision making, Energy management, Energy-consumption, Exponential growth, Extended reality (XR), Forecasting, Human computer interaction, Immersive, Language Model, Large language model, large language models (LLMs), Long short-term memory, Long Short-Term Memory (LSTM), Short term memory, Smart Grid technologies, Smart Meters, Smart power grids, Visual analytics},
  pubstate  = {published},
  tppubtype = {inproceedings}
}