AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2024
Geetha, S.; Aditya, G.; Reddy, M. Chetan; Nischith, G.
Human Interaction in Virtual and Mixed Reality Through Hand Tracking Proceedings Article
In: Proc. CONECCT - IEEE Int. Conf. Electron., Comput. Commun. Technol., Institute of Electrical and Electronics Engineers Inc., 2024, ISBN: 979-835038592-2 (ISBN).
Abstract | Links | BibTeX | Tags: Computer interaction, Computer simulation languages, Daily lives, Digital elevation model, Hand gesture, hand tracking, Hand-tracking, human-computer interaction, Humaninteraction, Interaction dynamics, Mixed reality, Unity, User friendly interface, User interfaces, Virtual environments, Virtual Reality, Virtual spaces
@inproceedings{geetha_human_2024,
  title     = {Human Interaction in Virtual and Mixed Reality Through Hand Tracking},
  author    = {Geetha, S. and Aditya, G. and Reddy, M. Chetan and Nischith, G.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85205768661&doi=10.1109%2fCONECCT62155.2024.10677239&partnerID=40&md5=173e590ca9a1e30b760d05af562f311a},
  doi       = {10.1109/CONECCT62155.2024.10677239},
  isbn      = {979-835038592-2},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {Proc. CONECCT - IEEE Int. Conf. Electron., Comput. Commun. Technol.},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract  = {This paper explores the potential and possibilities of hand tracking in virtual reality (VR) and mixed reality (MR), focusing on its role in human interaction dynamics. An application was designed in Unity leveraging the XR Interaction toolkit, within which various items across three important domains: daily life, education, and recreation, were crafted to demonstrate the versatility of hand tracking along with hand gesture-based shortcuts for interaction. Integration of elements in MR ensures that users can seamlessly enjoy virtual experiences while remaining connected to their physical surroundings. Precise hand tracking enables effortless interaction with the virtual space, enhancing presence and control with a user-friendly interface. Additionally, the paper explores the effectiveness of integrating hand tracking into education and training scenarios. A computer assembly simulation was created to demonstrate this, featuring component inspection and zoom capabilities along with a large language model (LLM) integrated with hand gestures to provide for interaction capabilities. © 2024 IEEE.},
  keywords  = {Computer interaction, Computer simulation languages, Daily lives, Digital elevation model, Hand gesture, hand tracking, Hand-tracking, human-computer interaction, Humaninteraction, Interaction dynamics, Mixed reality, Unity, User friendly interface, User interfaces, Virtual environments, Virtual Reality, Virtual spaces},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Xiao, Z.; Wang, T.; Wang, J.; Cao, J.; Zhang, W.; Dai, B.; Lin, D.; Pang, J.
UNIFIED HUMAN-SCENE INTERACTION VIA PROMPTED CHAIN-OF-CONTACTS Proceedings Article
In: Int. Conf. Learn. Represent., ICLR, International Conference on Learning Representations, ICLR, 2024.
Abstract | Links | BibTeX | Tags: Contact regions, Human joints, Interaction controls, Interaction framework, Quality control, Scene interactions, Strong correlation, Task executions, Task plan, Unified control, User friendly interface, Virtual Reality
@inproceedings{xiao_unified_2024,
  title     = {Unified Human-Scene Interaction via Prompted {Chain-of-Contacts}},
  author    = {Xiao, Z. and Wang, T. and Wang, J. and Cao, J. and Zhang, W. and Dai, B. and Lin, D. and Pang, J.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85189112121&partnerID=40&md5=ed6c80431e6c18f32cdb9dd013fd60d0},
  year      = {2024},
  date      = {2024-01-01},
  booktitle = {Int. Conf. Learn. Represent., ICLR},
  publisher = {International Conference on Learning Representations, ICLR},
  abstract  = {Human-Scene Interaction (HSI) is a vital component of fields like embodied AI and virtual reality. Despite advancements in motion quality and physical plausibility, two pivotal factors, versatile interaction control and user-friendly interfaces, require further exploration for the practical application of HSI. This paper presents a unified HSI framework, named UniHSI, that supports unified control of diverse interactions through language commands. The framework defines interaction as “Chain of Contacts (CoC)”, representing steps involving human joint-object part pairs. This concept is inspired by the strong correlation between interaction types and corresponding contact regions. Based on the definition, UniHSI constitutes a Large Language Model (LLM) Planner to translate language prompts into task plans in the form of CoC, and a Unified Controller that turns CoC into uniform task execution. To support training and evaluation, we collect a new dataset named ScenePlan that encompasses thousands of task plans generated by LLMs based on diverse scenarios. Comprehensive experiments demonstrate the effectiveness of our framework in versatile task execution and generalizability to real scanned scenes. © 2024 12th International Conference on Learning Representations, ICLR 2024. All rights reserved.},
  keywords  = {Contact regions, Human joints, Interaction controls, Interaction framework, Quality control, Scene interactions, Strong correlation, Task executions, Task plan, Unified control, User friendly interface, Virtual Reality},
  pubstate  = {published},
  tppubtype = {inproceedings}
}