AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
2023
Vlasov, A. V.
GALA Inspired by Klimt's Art: Text-to-image Processing with Implementation in Interaction and Perception Studies: Library and Case Examples Journal Article
In: Annual Review of CyberTherapy and Telemedicine, vol. 21, pp. 200–205, 2023, ISSN: 1554-8716.
Tags: AIGC, applied research, art library, Article, Artificial intelligence, benchmarking, dataset, GALA, human, Human computer interaction, Image processing, Klimt, library, life satisfaction, neuropoem, Text-to-image, Virtual Reality, Wellbeing
@article{vlasov_gala_2023,
title = {GALA Inspired by Klimt's Art: Text-to-image Processing with Implementation in Interaction and Perception Studies: Library and Case Examples},
author = {A. V. Vlasov},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85182461798&partnerID=40&md5=0c3f5f4214a46db51f46f0092495eb2b},
issn = {1554-8716},
year = {2023},
date = {2023-01-01},
journal = {Annual Review of CyberTherapy and Telemedicine},
volume = {21},
pages = {200--205},
abstract = {Objectives: (a) to develop a library with AI generated content (AIGC) based on a combinatorial scheme of prompting for interaction and perception research; (b) to show examples of AIGC implementation. The result is a public library for applied research in the cyber-psychological community (CYPSY). The Generative Art Library Abstractions (GALA) include images (Figures 1-2) based on the text-image model and inspired by the artwork of Gustav Klimt. They can be used for comparative analysis (benchmarking), end-to-end evaluation, and advanced design. This allows experimentation with complex human-computer interaction (HCI) architectures and visual communication systems, and provides creative design support for experimenting. Examples include: interactive perception of positively colored generative images; HCI dialogues using visual language; generated moods in a VR environment; brain-computer interface for HCI. Respectfully, these visualization resources are a valuable example of AIGC for next-generation R&D. Any suggestions from the CYPSY community are welcome. © 2023, Interactive Media Institute. All rights reserved.},
keywords = {AIGC, applied research, art library, Article, Artificial intelligence, benchmarking, dataset, GALA, human, Human computer interaction, Image processing, Klimt, library, life satisfaction, neuropoem, Text-to-image, Virtual Reality, Wellbeing},
pubstate = {published},
tppubtype = {article}
}
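
The GALA paper above builds its image library from a combinatorial scheme of prompting. As a purely illustrative sketch (the paper does not publish its prompt grammar, and every vocabulary list and function name below is a hypothetical placeholder), the following Python snippet shows how such a scheme could enumerate prompts by crossing subject, Klimt-inspired style, and mood terms; each resulting prompt would then be sent to whatever text-to-image model the study uses.

```python
# Illustrative sketch of a combinatorial prompting scheme; not the GALA code.
# All vocabulary lists below are hypothetical placeholders.
from itertools import product

subjects = ["a portrait of a woman", "two figures embracing", "a garden in bloom"]
styles = ["in the style of Gustav Klimt", "with gold-leaf ornamentation",
          "with Art Nouveau mosaic patterns"]
moods = ["serene", "joyful", "contemplative"]

def build_prompt_library(subjects, styles, moods):
    """Return every subject x style x mood combination as a prompt string."""
    return [f"{subject}, {mood}, {style}"
            for subject, style, mood in product(subjects, styles, moods)]

if __name__ == "__main__":
    prompts = build_prompt_library(subjects, styles, moods)
    print(f"{len(prompts)} prompts generated, for example:")
    for prompt in prompts[:3]:
        print(" -", prompt)
    # Each prompt would then be passed to a text-to-image model of choice
    # to populate the image library used in the perception studies.
```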
2022
Augello, Agnese; Infantino, Ignazio; Pilato, Giovanni; Vitale, Gianpaolo
Extending affective capabilities for medical assistive robots Journal Article
In: Cognitive Systems Research, vol. 73, pp. 21–25, 2022, ISSN: 1389-0417.
Tags: Anthropomorphic Robots, Assistive Robots, Emotion Detection, Facial Expressions, Human computer interaction, Human Robot Interaction, Humanoid Robots, Natural Language Processing, Robotics, Wellbeing
@article{augello_extending_2022,
title = {Extending affective capabilities for medical assistive robots},
author = {Agnese Augello and Ignazio Infantino and Giovanni Pilato and Gianpaolo Vitale},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85123046436&doi=10.1016%2fj.cogsys.2021.12.004&partnerID=40&md5=6e76332f7f95333a9ae2e8f11c054622},
doi = {10.1016/j.cogsys.2021.12.004},
issn = {1389-0417},
year = {2022},
date = {2022-01-01},
journal = {Cognitive Systems Research},
volume = {73},
pages = {21--25},
abstract = {In this work, we discuss methodologies and implementation choices to enable a humanoid robot to estimate patients’ mood and emotions during postoperative home rehabilitation. The approach is modular and it has been implemented into a SoftBank Pepper robotic architecture; however, the approach is general and it can be easily adapted to other robotic platforms. A sample of an interactive session for the detection of the patient's affective state is also reported. © 2022 Elsevier B.V.},
keywords = {Anthropomorphic Robots, Assistive Robots, Emotion Detection, Facial Expressions, Human computer interaction, Human Robot Interaction, Humanoid Robots, Natural Language Processing, Robotics, Wellbeing},
pubstate = {published},
tppubtype = {article}
}
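
The Augello et al. abstract describes a modular architecture in which separate perception channels (facial expressions, natural-language dialogue) contribute to an estimate of the patient's affective state. The sketch below is an assumption-laden illustration of that idea only, not the authors' SoftBank Pepper implementation: both channel functions are hypothetical stand-ins, and the fusion rule is a simple weighted average chosen for clarity.

```python
# Minimal sketch of a modular affective-state pipeline; not the authors'
# Pepper/NAOqi code. The channel estimators are hypothetical stand-ins for a
# facial-expression classifier and a sentiment/NLP module.
from dataclasses import dataclass

EMOTIONS = ("happy", "sad", "neutral", "frustrated")

@dataclass
class ChannelEstimate:
    name: str
    scores: dict  # emotion -> score in [0, 1]

def facial_channel(frame) -> ChannelEstimate:
    # Stand-in: a real module would run a facial-expression classifier on the frame.
    return ChannelEstimate("face", {"happy": 0.2, "sad": 0.5,
                                    "neutral": 0.2, "frustrated": 0.1})

def dialogue_channel(utterance: str) -> ChannelEstimate:
    # Stand-in: a real module would run sentiment / NLP analysis on the utterance.
    return ChannelEstimate("dialogue", {"happy": 0.1, "sad": 0.6,
                                        "neutral": 0.2, "frustrated": 0.1})

def fuse(estimates, weights=None):
    """Weighted average of per-channel scores; returns the top emotion and all scores."""
    weights = weights or {e.name: 1.0 for e in estimates}
    total = sum(weights[e.name] for e in estimates)
    fused = {emo: sum(weights[e.name] * e.scores[emo] for e in estimates) / total
             for emo in EMOTIONS}
    return max(fused, key=fused.get), fused

if __name__ == "__main__":
    estimates = [facial_channel(frame=None), dialogue_channel("I feel tired today")]
    mood, scores = fuse(estimates)
    print(f"Estimated affective state: {mood} ({scores})")
    # The fused estimate could then drive the robot's empathic response selection.
```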