AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2024
Asra, S. A.; Wickramarathne, J.
Artificial Intelligence (AI) in Augmented Reality (AR), Virtual Reality (VR) and Mixed Reality (MR) Experiences: Enhancing Immersion and Interaction for User Experiences Proceedings Article
In: B., Luo; S.K., Sahoo; Y.H., Lee; C.H.T., Lee; M., Ong; A., Alphones (Ed.): IEEE Reg 10 Annu Int Conf Proc TENCON, pp. 1700–1705, Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 21593442 (ISSN); 979-835035082-1 (ISBN).
Abstract | Links | BibTeX | Tags: AI, AR, Emersion experience, Immersive augmented realities, Mixed reality, MR, Primary sources, Real-world, Secondary sources, Training simulation, Users' experiences, Video game simulation, Video training, Virtual environments, VR
@inproceedings{asra_artificial_2024,
title = {Artificial Intelligence ({AI}) in Augmented Reality ({AR}), Virtual Reality ({VR}) and Mixed Reality ({MR}) Experiences: Enhancing Immersion and Interaction for User Experiences},
author = {Asra, S. A. and Wickramarathne, J.},
editor = {Luo, B. and Sahoo, S. K. and Lee, Y. H. and Lee, C. H. T. and Ong, M. and Alphones, A.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105000443498&doi=10.1109%2fTENCON61640.2024.10902724&partnerID=40&md5=2ff92b5e2529ae7fe797cd8026e8065d},
doi = {10.1109/TENCON61640.2024.10902724},
isbn = {979-8-3503-5082-1},
issn = {2159-3442},
year = {2024},
date = {2024-01-01},
booktitle = {IEEE Reg 10 Annu Int Conf Proc TENCON},
pages = {1700--1705},
publisher = {Institute of Electrical and Electronics Engineers Inc.},
abstract = {The utilisation of Artificial Intelligence (AI) generated material is one of the most fascinating advancements in the rapidly growing fields of Virtual Reality (VR), Augmented Reality (AR), and Mixed Reality (MR). Two examples of how AI-generated material is revolutionising how we interact with AR, VR and MR are video games and training simulations. In this essay, we'll examine the intriguing potential of AI-generated content and how it's being used to the development of hybrid real-world/virtual experiences. Using this strategy, we acquired the information from primary and secondary sources. We surveyed AR, VR, and MR users to compile the data for the primary source. Then, utilising published papers as a secondary source, information was gathered. By elucidating the concept of context immersion, this research can lay the foundation for the advancement of information regarding immersive AR, VR, and MR contexts. We are able to offer recommendations for overcoming the weak parts and strengthening the good ones based on the questionnaire survey findings. © 2024 IEEE.},
keywords = {AI, AR, Emersion experience, Immersive augmented realities, Mixed reality, MR, Primary sources, Real-world, Secondary sources, Training simulation, Users' experiences, Video game simulation, Video training, Virtual environments, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
Shih, N. -J.; Kung, C. -H.
grARffiti: The Reconstruction and Deployment of Augmented Reality (AR) Graffiti Journal Article
In: Technologies, vol. 12, no. 9, 2024, ISSN: 22277080 (ISSN).
Abstract | Links | BibTeX | Tags: AR, Generative AI, governance, graffiti, urban fabric
@article{shih_grarffiti_2024,
title = {{grARffiti}: The Reconstruction and Deployment of Augmented Reality ({AR}) Graffiti},
author = {Shih, N.-J. and Kung, C.-H.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85205252201&doi=10.3390%2ftechnologies12090169&partnerID=40&md5=4b11e12f077e8398f3578a8126ff156b},
doi = {10.3390/technologies12090169},
issn = {2227-7080},
year = {2024},
date = {2024-01-01},
journal = {Technologies},
volume = {12},
number = {9},
abstract = {Graffiti relies on social instrumentation for its creation on spatial structures. It is questioned whether different mechanisms exist to transfer social and spatial hierarchies under a new model for better engagement, management, and governance. This research aims to replace physical graffiti using augmented reality (AR) in smartphones. Contact-free AR graffiti starts with the creation of 3D graffiti; this is followed by an AR cloud platform upload, quick response (QR) code access, and site deployment, leading to the secondary reconstruction of a field scene using smartphone screenshots. The working structure was created based on the first 3D reconstruction of graffiti details as AR models and second 3D reconstruction of field graffiti on different backgrounds using a photogrammetry method. The 3D graffiti can be geotagged as a personal map and 3D printed for collections. This culture-engaged AR creates a two-way method of interacting with spatial structures where the result is collected as a self-governed form of social media. The reinterpreted context is represented by a virtual 3D sticker or symbolized name card shared on the cloud. The hidden or social hierarchy was reinterpreted by a sense of ritual without altering any space. The application of digital stickers in AR redefines the spatial order, typology, and governance of graffiti. © 2024 by the authors.},
keywords = {AR, Generative AI, governance, graffiti, urban fabric},
pubstate = {published},
tppubtype = {article}
}
Klein, A.; Arnowitz, E.
AI in mixed reality - Copilot on HoloLens: Spatial computing with large language models Proceedings Article
In: S.N., Spencer (Ed.): Proc. - SIGGRAPH Real-Time Live!, Association for Computing Machinery, Inc, 2024, ISBN: 979-840070526-7 (ISBN).
Abstract | Links | BibTeX | Tags: 3D, AI, AR, Gesture, Gestures, HoloLens, Language Model, LLM, Mixed reality, Real- time, Real-time, Spatial computing, User experience design, User interfaces, Voice
@inproceedings{klein_ai_2024,
title = {{AI} in mixed reality - {Copilot} on {HoloLens}: Spatial computing with large language models},
author = {Klein, A. and Arnowitz, E.},
editor = {Spencer, S. N.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85200657459&doi=10.1145%2f3641520.3665305&partnerID=40&md5=07d385771b8813c1fafa0efb7ae7e9f2},
doi = {10.1145/3641520.3665305},
isbn = {979-8-4007-0526-7},
year = {2024},
date = {2024-01-01},
booktitle = {Proc. - SIGGRAPH Real-Time Live!},
publisher = {Association for Computing Machinery, Inc},
abstract = {Mixed reality together with AI presents a human-first interface that promises to transform operations. Copilot can assist industrial workers in real-time with speech and holograms; generative AI is used to search technical documentation, service records, training content, and other sources. Copilot then summarizes to provide interactive guidance. © 2024 Owner/Author.},
keywords = {3D, AI, AR, Gesture, Gestures, HoloLens, Language Model, LLM, Mixed reality, Real- time, Real-time, Spatial computing, User experience design, User interfaces, Voice},
pubstate = {published},
tppubtype = {inproceedings}
}