AHCI RESEARCH GROUP
Publications
Papers published in international journals, proceedings of conferences, workshops, and books.
OUR RESEARCH
Scientific Publications
2025
Guo, P.; Zhang, Q.; Tian, C.; Xue, W.; Feng, X.
Digital Human Techniques for Education Reform (Proceedings Article)
In: ICETM - Proc. Int. Conf. Educ. Technol. Manag., pp. 173–178, Association for Computing Machinery, Inc, 2025, ISBN: 979-840071746-8.
Tags: Augmented Reality, Contrastive Learning, Digital elevation model, Digital human technique, Digital Human Techniques, Digital humans, Education Reform, Education reforms, Educational Technology, Express emotions, Federated learning, Human behaviors, Human form models, Human techniques, Immersive, Innovative technology, Modeling languages, Natural language processing systems, Teachers', Teaching, Virtual environments, Virtual humans
@inproceedings{guo_digital_2025,
title = {Digital Human Techniques for Education Reform},
author = {P. Guo and Q. Zhang and C. Tian and W. Xue and X. Feng},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105001671326&doi=10.1145%2f3711403.3711428&partnerID=40&md5=dd96647315af9409d119f68f9cf4e980},
doi = {10.1145/3711403.3711428},
isbn = {979-840071746-8},
year = {2025},
date = {2025-01-01},
booktitle = {ICETM - Proc. Int. Conf. Educ. Technol. Manag.},
pages = {173–178},
publisher = {Association for Computing Machinery, Inc},
abstract = {The rapid evolution of artificial intelligence, big data, and generative AI models has ushered in significant transformations across various sectors, including education. Digital Human Technique, an innovative technology grounded in advanced computer science and artificial intelligence, is reshaping educational paradigms by enabling virtual humans to simulate human behavior, express emotions, and interact with users. This paper explores the application of Digital Human Technique in education reform, focusing on creating immersive, intelligent classroom experiences that foster meaningful interactions between teachers and students. We define Digital Human Technique and delve into its key technical components such as character modeling and rendering, natural language processing, computer vision, and augmented reality technologies. Our methodology involves analyzing the role of educational digital humans created through these technologies, assessing their impact on educational processes, and examining various application scenarios in educational reform. Results indicate that Digital Human Technique significantly enhances the learning experience by enabling personalized teaching, increasing engagement, and fostering emotional connections. Educational digital humans serve as virtual teachers, interactive learning aids, and facilitators of emotional interaction, effectively addressing the challenges of traditional educational methods. They also promote a deeper understanding of complex concepts through simulated environments and interactive digital content. © 2024 Copyright held by the owner/author(s).},
keywords = {Augmented Reality, Contrastive Learning, Digital elevation model, Digital human technique, Digital Human Techniques, Digital humans, Education Reform, Education reforms, Educational Technology, Express emotions, Federated learning, Human behaviors, Human form models, Human techniques, Immersive, Innovative technology, Modeling languages, Natural language processing systems, Teachers', Teaching, Virtual environments, Virtual humans},
pubstate = {published},
tppubtype = {inproceedings}
}
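
For readers who want to experiment with the kind of pipeline this abstract describes (a virtual teacher that couples natural language processing with an animated character), the following Python sketch shows one possible structure. It is a minimal illustration, not code from the paper: the class names, the keyword-based emotion check, and the animation cues are hypothetical placeholders standing in for the character modeling, NLP, and rendering components the authors mention.

# Minimal sketch of an "educational digital human" loop, assuming a
# hypothetical split into an NLP component and a rendering/animation
# component as described in the abstract. All names are illustrative.
from dataclasses import dataclass

@dataclass
class TutorResponse:
    text: str            # what the virtual teacher says
    animation_cue: str   # gesture/expression cue sent to the avatar renderer

class DigitalHumanTutor:
    """Toy virtual teacher: detect the student's emotional tone with a
    simple keyword rule, then pair a reply with an animation cue."""

    FRUSTRATION_MARKERS = {"confused", "stuck", "don't understand", "lost"}

    def detect_emotion(self, utterance: str) -> str:
        lowered = utterance.lower()
        if any(marker in lowered for marker in self.FRUSTRATION_MARKERS):
            return "frustrated"
        return "neutral"

    def respond(self, utterance: str) -> TutorResponse:
        emotion = self.detect_emotion(utterance)
        if emotion == "frustrated":
            return TutorResponse(
                text="Let's slow down and go through the idea step by step.",
                animation_cue="lean_forward_smile",
            )
        return TutorResponse(
            text="Good question. Here is how the concept works...",
            animation_cue="open_hand_gesture",
        )

if __name__ == "__main__":
    tutor = DigitalHumanTutor()
    reply = tutor.respond("I'm stuck on this equation.")
    print(reply.text, "| cue:", reply.animation_cue)

In a real system, detect_emotion and respond would be backed by the NLP and generative models the paper discusses, and the animation cue would drive the character rendering pipeline; the rule-based stand-ins here only show how the pieces could fit together.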
2024
Fujii, A.; Fukuda, K.
Initial Study on Robot Emotional Expression Using Manpu (Proceedings Article)
In: ACM/IEEE Int. Conf. Hum.-Rob. Interact., pp. 463–467, IEEE Computer Society, 2024, ISSN: 2167-2148, ISBN: 979-840070323-2.
Tags: Comic engineering, Comic symbol, Comic symbols, Display devices, Emotional expressions, Express emotions, Generic expression, Human Robot Interaction, Human robots, Human-robot interaction, Humans-robot interactions, Machine design, Man machine systems, Manpu, Mixed reality, Symbiotics, Symbolic methods
@inproceedings{fujii_initial_2024,
title = {Initial Study on Robot Emotional Expression Using Manpu},
author = {A. Fujii and K. Fukuda},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85188120223&doi=10.1145%2f3610978.3640652&partnerID=40&md5=4277cbd98c0474e2e7ba19d352e6f46e},
doi = {10.1145/3610978.3640652},
issn = {2167-2148},
isbn = {979-840070323-2},
year = {2024},
date = {2024-01-01},
booktitle = {ACM/IEEE Int. Conf. Hum.-Rob. Interact.},
pages = {463–467},
publisher = {IEEE Computer Society},
abstract = {In recent years, robots have started to play an active role in various places in society. The ability of robots not only to convey information but also to interact emotionally is necessary to realize a human-robot symbiotic society. Many studies have been conducted on the emotional expression of robots. However, as robots come in a wide variety of designs, it is difficult to construct a generic expression method, and some robots are not equipped with expression devices such as faces or displays. To address these problems, this research aims to develop technology that enables robots to express emotions, using Manpu (a symbolic method used in comic books, expressing not only the emotions of humans and animals but also the states of objects) and mixed reality technology. As the first step of the research, we categorize manpu and use large language models to generate manpu expressions according to the dialogue information. © 2024 Copyright held by the owner/author(s).},
keywords = {Comic engineering, Comic symbol, Comic symbols, Display devices, Emotional expressions, Express emotions, Generic expression, Human Robot Interaction, Human robots, Human-robot interaction, Humans-robot interactions, Machine design, Man machine systems, Manpu, Mixed reality, Symbiotics, Symbolic methods},
pubstate = {published},
tppubtype = {inproceedings}
}
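
As a companion to the abstract above, here is a small Python sketch of the pipeline it outlines: categorize the emotion carried by a piece of dialogue, map it to a manpu symbol category, and hand the choice to a language-model hook that could refine it before a mixed-reality display renders the overlay. This is a hedged illustration only; the emotion categories, the request_manpu_from_llm hook, and the overlay format are assumptions, and the keyword classifier stands in for whatever models the authors actually use.

# Sketch of a manpu-selection pipeline, assuming a hypothetical LLM hook.
# Emotion categories and the overlay format are illustrative, not from the paper.
from dataclasses import dataclass

# A few manpu categories with symbols commonly associated with them in comics.
MANPU_BY_EMOTION = {
    "joy": "sparkles",
    "anger": "popping_vein",
    "surprise": "exclamation_burst",
    "confusion": "swirl_over_head",
}

@dataclass
class ManpuOverlay:
    symbol: str        # which manpu to render around the robot
    anchor: str        # where the mixed-reality display should place it
    duration_s: float  # how long the symbol stays visible

def classify_emotion(dialogue: str) -> str:
    """Stand-in classifier: keyword rules instead of a learned model."""
    lowered = dialogue.lower()
    if "?" in lowered or "what" in lowered:
        return "confusion"
    if "great" in lowered or "thanks" in lowered:
        return "joy"
    return "surprise"

def request_manpu_from_llm(dialogue: str, emotion: str) -> str:
    """Hypothetical hook where a large language model would pick or refine
    the manpu given the dialogue; here it falls back to the lookup table."""
    return MANPU_BY_EMOTION.get(emotion, "exclamation_burst")

def manpu_for_dialogue(dialogue: str) -> ManpuOverlay:
    emotion = classify_emotion(dialogue)
    symbol = request_manpu_from_llm(dialogue, emotion)
    return ManpuOverlay(symbol=symbol, anchor="above_head", duration_s=2.0)

if __name__ == "__main__":
    overlay = manpu_for_dialogue("Wait, what did you just do?")
    print(overlay)

The design mirrors the paper's motivation: because many robots lack faces or displays, the emotional cue is produced as a separate overlay specification that a mixed-reality device can render around any robot body, independent of the robot's own hardware.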