AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2025
Jayanthy, S.; Selvaganesh, M.; Kumar, S. Sakthi; Sathish, A. Manjunatha; Sabarisan, K. M.; Arasi, T. Senthamil
Generative AI Solution for CNC Machines and Robotics Code Generation Proceedings Article
In: Institute of Electrical and Electronics Engineers Inc., 2025, ISBN: 9798331536695 (ISBN).
Abstract | Links | BibTeX | Tags: Adaptive control systems, Adversarial networks, Automated Code Generation, Automatic programming, CNC machine, CNC Machines, CNC system, Codegeneration, Computer aided instruction, Computer control, Computer control systems, E-Learning, Edge computing, Federated learning, Flow control, GANs, Generative pre-trained transformer transformer, GPT Transformers, Industrial research, Industry 4.0, Innovative approaches, Intelligent robots, Learning algorithms, Personnel training, Reinforcement Learning, Reinforcement learnings, Robotic systems, Simulation platform, Smart manufacturing, Virtual Reality
@inproceedings{jayanthy_generative_2025,
  title = {Generative {AI} Solution for {CNC} Machines and Robotics Code Generation},
  author = {Jayanthy, S. and Selvaganesh, M. and Kumar, S. Sakthi and Sathish, A. Manjunatha and Sabarisan, K. M. and Arasi, T. Senthamil},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105011963078&doi=10.1109%2FICCIES63851.2025.11033032&partnerID=40&md5=fb9143cd22dc48ae6c557f722cc2d6ab},
  doi = {10.1109/ICCIES63851.2025.11033032},
  isbn = {9798331536695},
  year = {2025},
  date = {2025-01-01},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract = {The advent of Industry 4.0 has revolutionized the manufacturing landscape, driving significant advancements in automation and intelligence. This study introduces an innovative approach to automated code generation for CNC and robotic systems, leveraging Generative Adversarial Networks (GANs) and GPT(Generative Pre-trained Transformer) Transformers. These AI models enable precise and optimized code creation, minimizing manual errors. Adaptive process control, achieved through Reinforcement Learning (RL), allows real-time adjustments to operational parameters, enhancing performance in dynamic environments. The incorporation of natural language processing through Transformer models facilitates intuitive operator interactions via user-friendly interfaces. Immersive Virtual Reality (VR) technologies provide high-fidelity simulation and training platforms for realistic testing and control. Additionally, collaborative learning mechanisms, achieved through Federated Learning and Edge-cloud computing, support continuous improvement and scalable deployment. Impressive outcomes were attained by the system, including 90.5% training efficiency, 98.7% coding accuracy, 95.2% adaptability, and 93.4% operator satisfaction. Experimental results validate the system's superior accuracy, adaptability, and user-centric design, showcasing its potential to revolutionize manufacturing processes. This research sets a new benchmark for intelligent, efficient, and scalable automation in the Industry 4.0 era, paving the way for transformative innovations in smart manufacturing.},
  keywords = {Adaptive control systems, Adversarial networks, Automated Code Generation, Automatic programming, CNC machine, CNC Machines, CNC system, Codegeneration, Computer aided instruction, Computer control, Computer control systems, E-Learning, Edge computing, Federated learning, Flow control, GANs, Generative pre-trained transformer transformer, GPT Transformers, Industrial research, Industry 4.0, Innovative approaches, Intelligent robots, Learning algorithms, Personnel training, Reinforcement Learning, Reinforcement learnings, Robotic systems, Simulation platform, Smart manufacturing, Virtual Reality},
  internal-note = {booktitle (proceedings title) missing from Scopus export -- TODO: confirm against DOI 10.1109/ICCIES63851.2025.11033032},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Tomkou, D.; Fatouros, G.; Andreou, A.; Makridis, G.; Liarokapis, F.; Dardanis, D.; Kiourtis, A.; Soldatos, J.; Kyriazis, D.
Bridging Industrial Expertise and XR with LLM-Powered Conversational Agents Proceedings Article
In: pp. 1050–1056, Institute of Electrical and Electronics Engineers Inc., 2025, ISBN: 9798331543723 (ISBN).
Abstract | Links | BibTeX | Tags: Air navigation, Conversational Agents, Conversational AI, Embeddings, Engineering education, Extended reality, Knowledge Management, Knowledge transfer, Language Model, Large language model, large language models, Personnel training, Remote Assistance, Retrieval-Augmented Generation, Robotics, Semantics, Smart manufacturing
@inproceedings{tomkou_bridging_2025,
  title = {Bridging Industrial Expertise and {XR} with {LLM}-Powered Conversational Agents},
  author = {Tomkou, D. and Fatouros, G. and Andreou, A. and Makridis, G. and Liarokapis, F. and Dardanis, D. and Kiourtis, A. and Soldatos, J. and Kyriazis, D.},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105013837767&doi=10.1109%2FDCOSS-IoT65416.2025.00158&partnerID=40&md5=45e35086d8be9d3e16afeade6598d238},
  doi = {10.1109/DCOSS-IoT65416.2025.00158},
  isbn = {9798331543723},
  year = {2025},
  date = {2025-01-01},
  pages = {1050--1056},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract = {This paper introduces a novel integration of Retrieval-Augmented Generation (RAG) enhanced Large Language Models (LLMs) with Extended Reality (XR) technologies to address knowledge transfer challenges in industrial environments. The proposed system embeds domain-specific industrial knowledge into XR environments through a natural language interface, enabling hands-free, context-aware expert guidance for workers. We present the architecture of the proposed system consisting of an LLM Chat Engine with dynamic tool orchestration and an XR application featuring voice-driven interaction. Performance evaluation of various chunking strategies, embedding models, and vector databases reveals that semantic chunking, balanced embedding models, and efficient vector stores deliver optimal performance for industrial knowledge retrieval. The system's potential is demonstrated through early implementation in multiple industrial use cases, including robotic assembly, smart infrastructure maintenance, and aerospace component servicing. Results indicate potential for enhancing training efficiency, remote assistance capabilities, and operational guidance in alignment with Industry 5.0's human-centric and resilient approach to industrial development.},
  keywords = {Air navigation, Conversational Agents, Conversational AI, Embeddings, Engineering education, Extended reality, Knowledge Management, Knowledge transfer, Language Model, Large language model, large language models, Personnel training, Remote Assistance, Retrieval-Augmented Generation, Robotics, Semantics, Smart manufacturing},
  internal-note = {booktitle (proceedings title) missing from Scopus export -- TODO: confirm against DOI 10.1109/DCOSS-IoT65416.2025.00158},
  pubstate = {published},
  tppubtype = {inproceedings}
}
2024
Zheng, P.; Li, C.; Fan, J.; Wang, L.
A vision-language-guided and deep reinforcement learning-enabled approach for unstructured human-robot collaborative manufacturing task fulfilment Journal Article
In: CIRP Annals, vol. 73, no. 1, pp. 341–344, 2024, ISSN: 00078506 (ISSN).
Abstract | Links | BibTeX | Tags: Collaboration task, Collaborative manufacturing, Deep learning, Helmet mounted displays, Human robots, Human-centric, Human-guided robot learning, Human-Robot Collaboration, Interface states, Manipulators, Manufacturing system, Manufacturing tasks, Mixed reality, Mixed reality head-mounted displays, Reinforcement Learning, Reinforcement learnings, Robot vision, Smart manufacturing
@article{zheng_vision-language-guided_2024,
  title = {A vision-language-guided and deep reinforcement learning-enabled approach for unstructured human-robot collaborative manufacturing task fulfilment},
  author = {Zheng, P. and Li, C. and Fan, J. and Wang, L.},
  url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85190754943&doi=10.1016%2fj.cirp.2024.04.003&partnerID=40&md5=59c453e1931e912472e76b86b77a881b},
  doi = {10.1016/j.cirp.2024.04.003},
  issn = {00078506},
  year = {2024},
  date = {2024-01-01},
  journal = {CIRP Annals},
  volume = {73},
  number = {1},
  pages = {341--344},
  abstract = {Human-Robot Collaboration (HRC) has emerged as a pivot in contemporary human-centric smart manufacturing scenarios. However, the fulfilment of HRC tasks in unstructured scenes brings many challenges to be overcome. In this work, mixed reality head-mounted display is modelled as an effective data collection, communication, and state representation interface/tool for HRC task settings. By integrating vision-language cues with large language model, a vision-language-guided HRC task planning approach is firstly proposed. Then, a deep reinforcement learning-enabled mobile manipulator motion control policy is generated to fulfil HRC task primitives. Its feasibility is demonstrated in several HRC unstructured manufacturing tasks with comparative results.},
  keywords = {Collaboration task, Collaborative manufacturing, Deep learning, Helmet mounted displays, Human robots, Human-centric, Human-guided robot learning, Human-Robot Collaboration, Interface states, Manipulators, Manufacturing system, Manufacturing tasks, Mixed reality, Mixed reality head-mounted displays, Reinforcement Learning, Reinforcement learnings, Robot vision, Smart manufacturing},
  pubstate = {published},
  tppubtype = {article}
}