AHCI RESEARCH GROUP
Publications
Papers published in international journals, proceedings of conferences, workshops, and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links, and BibTeX record for each paper.
2024
Liang, Q.; Chen, Y.; Li, W.; Lai, M.; Ni, W.; Qiu, H.
iKnowiSee: AR Glasses with Language Learning Translation System and Identity Recognition System Built Based on Large Pre-trained Models of Language and Vision and Internet of Things Technology Proceedings Article
In: Zhang, L.; Yu, W.; Wang, Q.; Laili, Y.; Liu, Y. (Eds.): Commun. Comput. Info. Sci., pp. 12–24, Springer Science and Business Media Deutschland GmbH, 2024, ISSN: 1865-0929, ISBN: 978-981-97-3947-9.
Abstract | Links | BibTeX | Tags: Augmented Reality, Glass, Identity recognition, Internet of Things, Internet of things technologies, IoT, Language learning, Learning systems, LLM, Object Detection, Objects detection, Open Vocabulary Object Detection, Recognition systems, Semantics, Telephone sets, Translation (languages), Translation systems, Visual languages, Wearable computers, Wearable device, Wearable devices
@inproceedings{liang_iknowisee_2024,
title = {iKnowiSee: AR Glasses with Language Learning Translation System and Identity Recognition System Built Based on Large Pre-trained Models of Language and Vision and Internet of Things Technology},
author = {Q. Liang and Y. Chen and W. Li and M. Lai and W. Ni and H. Qiu},
editor = {Zhang, L. and Yu, W. and Wang, Q. and Laili, Y. and Liu, Y.},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85200663840&doi=10.1007%2f978-981-97-3948-6_2&partnerID=40&md5=a0324ba6108674b1d39a338574269d60},
doi = {10.1007/978-981-97-3948-6_2},
issn = {1865-0929},
isbn = {978-981-97-3947-9},
year = {2024},
date = {2024-01-01},
booktitle = {Commun. Comput. Info. Sci.},
volume = {2139 CCIS},
pages = {12–24},
publisher = {Springer Science and Business Media Deutschland GmbH},
abstract = {AR glasses used in daily life have made good progress and have some practical value. However, the current design concept of AR glasses is basically to simply port the content of a cell phone and act as a secondary screen for the phone. In contrast, the AR glasses we designed are based on actual situations, focus on real-world interactions, and utilize IoT technology with the aim of enabling users to fully extract and utilize the digital information in their lives. We have created two innovative features, one is a language learning translation system for users to learn foreign languages, which integrates a large language model with an open vocabulary recognition model to fully extract the visual semantic information of the scene; and the other is a social conferencing system, which utilizes the IoT cloud, pipe, edge, and end development to reduce the cost of communication and improve the efficiency of exchanges in social situations. © The Author(s), under exclusive license to Springer Nature Singapore Pte Ltd. 2024.},
keywords = {Augmented Reality, Glass, Identity recognition, Internet of Things, Internet of things technologies, IoT, Language learning, Learning systems, LLM, Object Detection, Objects detection, Open Vocabulary Object Detection, Recognition systems, Semantics, Telephone sets, Translation (languages), Translation systems, Visual languages, Wearable computers, Wearable device, Wearable devices},
pubstate = {published},
tppubtype = {inproceedings}
}
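For readers curious about the integration the iKnowiSee abstract describes (an open-vocabulary detector feeding an LLM-based translation flow), the short Python sketch below illustrates that general pattern. It is not the authors' implementation: the OWL-ViT checkpoint, the candidate vocabulary, and the prompt-building step are illustrative assumptions, and the LLM call itself is left as a placeholder.

# Minimal sketch of the "open-vocabulary detector + LLM" pattern described above.
# Assumptions (not from the paper): OWL-ViT as the detector, a hand-picked candidate
# vocabulary, and a prompt-building step standing in for the LLM translation call.
import torch
from PIL import Image
from transformers import OwlViTProcessor, OwlViTForObjectDetection

processor = OwlViTProcessor.from_pretrained("google/owlvit-base-patch32")
model = OwlViTForObjectDetection.from_pretrained("google/owlvit-base-patch32")

def detect_objects(image, vocabulary, threshold=0.2):
    """Return (label, score) pairs for vocabulary terms the detector finds in the image."""
    inputs = processor(text=[vocabulary], images=image, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    target_sizes = torch.tensor([image.size[::-1]])  # PIL size is (w, h); the model expects (h, w)
    results = processor.post_process_object_detection(
        outputs=outputs, target_sizes=target_sizes, threshold=threshold
    )[0]
    return [(vocabulary[i], s) for i, s in zip(results["labels"].tolist(),
                                               results["scores"].tolist())]

def build_translation_prompt(labels, target_language="Japanese"):
    """Compose a prompt asking an LLM for learner-friendly translations of detected objects."""
    seen = ", ".join(sorted(set(labels)))
    return (f"The user is looking at: {seen}. For each object, give the {target_language} "
            f"word, its pronunciation, and one short beginner-level example sentence.")

if __name__ == "__main__":
    frame = Image.open("camera_frame.jpg")  # stand-in for an AR-glasses camera frame
    detections = detect_objects(frame, ["cup", "laptop", "book", "phone"])
    prompt = build_translation_prompt([label for label, _ in detections])
    print(prompt)  # in a full system this prompt would be sent to an LLM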
Janaka, N.
Towards Intelligent Wearable Assistants Proceedings Article
In: UbiComp Companion - Companion ACM Int. Jt. Conf. Pervasive Ubiquitous Comput., pp. 618–621, Association for Computing Machinery, Inc, 2024, ISBN: 979-8-4007-1058-2.
Abstract | Links | BibTeX | Tags: AI assistance, Augmented Reality, context-aware system, Context-aware systems, HMD, Interaction, interactions, Interruption, interruptions, MR, Notification, notifications, Smart glass, smart glasses, wearable, Wearable assistant, Wearable computers, XR
@inproceedings{janaka_towards_2024,
title = {Towards Intelligent Wearable Assistants},
author = {N. Janaka},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85206157248&doi=10.1145%2f3675094.3678989&partnerID=40&md5=539933fdbb3b5289b179cbe9e8f7c083},
doi = {10.1145/3675094.3678989},
isbn = {979-8-4007-1058-2},
year = {2024},
date = {2024-01-01},
booktitle = {UbiComp Companion - Companion ACM Int. Jt. Conf. Pervasive Ubiquitous Comput.},
pages = {618–621},
publisher = {Association for Computing Machinery, Inc},
abstract = {This summary outlines my research toward developing intelligent wearable assistants that provide personalized, context-aware computing assistance. Previous work explored information presentation using smart glasses, socially-aware interactions, and applications for learning, communication, and documentation. Current research aims to develop tools for interaction research, including data collection, multimodal evaluation metrics, and a platform for creating context-aware AI assistants. Future goals include extending assistants to physical spaces via telepresence, optimizing learning with generative AI, and investigating collaborative human-AI learning. Ultimately, this research seeks to redefine how humans receive seamless support through proactive, intelligent wearable assistants that comprehend users and environments, augmenting capabilities while reducing reliance on manual labor. © 2024 Copyright held by the owner/author(s).},
keywords = {AI assistance, Augmented Reality, context-aware system, Context-aware systems, HMD, Interaction, interactions, Interruption, interruptions, MR, Notification, notifications, Smart glass, smart glasses, wearable, Wearable assistant, Wearable computers, XR},
pubstate = {published},
tppubtype = {inproceedings}
}