AHCI RESEARCH GROUP
Publications
Papers published in international journals,
conference and workshop proceedings, and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2022
Augello, Agnese; Infantino, Ignazio; Pilato, Giovanni; Vitale, Gianpaolo
Extending Affective Capabilities for Medical Assistive Robots Journal Article
In: Cognitive Systems Research, vol. 73, pp. 21–25, 2022, ISSN: 1389-0417.
Abstract | Links | BibTeX | Tags: Anthropomorphic Robots, Assistive Robots, Emotion Detection, Facial Expressions, Human computer interaction, Human Robot Interaction, Humanoid Robots, Natural Language Processing, Robotics, Wellbeing
@article{augelloExtendingAffectiveCapabilities2022,
title = {Extending Affective Capabilities for Medical Assistive Robots},
author = {Agnese Augello and Ignazio Infantino and Giovanni Pilato and Gianpaolo Vitale},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85123046436&doi=10.1016%2fj.cogsys.2021.12.004&partnerID=40&md5=6e76332f7f95333a9ae2e8f11c054622},
doi = {10.1016/j.cogsys.2021.12.004},
issn = {1389-0417},
year = {2022},
date = {2022-01-01},
journal = {Cognitive Systems Research},
volume = {73},
pages = {21--25},
abstract = {In this work, we discuss methodologies and implementation choices to enable a humanoid robot to estimate patients' mood and emotions during postoperative home rehabilitation. The approach is modular and it has been implemented into a SoftBank Pepper robotic architecture; however, the approach is general and it can be easily adapted to other robotic platforms. A sample of an interactive session for the detection of the patient's affective state is also reported. © 2022 Elsevier B.V.},
keywords = {Anthropomorphic Robots, Assistive Robots, Emotion Detection, Facial Expressions, Human computer interaction, Human Robot Interaction, Humanoid Robots, Natural Language Processing, Robotics, Wellbeing},
pubstate = {published},
tppubtype = {article}
}
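As an illustration of the modular design summarized in the abstract above, the following Python fragment combines a facial-expression module and a spoken-language module into a single affective estimate. It is a minimal, hypothetical sketch for readers, not the architecture described in the paper: the module names, emotion labels, placeholder classifiers and fusion weights are all assumptions.

# Hypothetical sketch of a modular affective-state estimator: one module scores
# emotions from a camera frame, another from the patient's spoken reply, and a
# fusion step combines them. Placeholder logic only, not the paper's implementation.
from dataclasses import dataclass
from typing import Dict

EMOTIONS = ("joy", "sadness", "anger", "fear", "neutral")

@dataclass
class AffectiveEstimate:
    scores: Dict[str, float]   # normalized per-emotion scores
    dominant: str              # label with the highest score

def normalize(scores: Dict[str, float]) -> Dict[str, float]:
    total = sum(scores.values()) or 1.0
    return {k: v / total for k, v in scores.items()}

def facial_module(frame) -> Dict[str, float]:
    """Placeholder for a facial-expression classifier run on camera frames;
    here it simply returns a fixed distribution."""
    return {"joy": 0.1, "sadness": 0.5, "anger": 0.1, "fear": 0.1, "neutral": 0.2}

def dialogue_module(utterance: str) -> Dict[str, float]:
    """Placeholder for an NLP emotion analyzer over the patient's spoken reply
    (toy keyword rule)."""
    negative = any(w in utterance.lower() for w in ("pain", "tired", "sad"))
    if negative:
        return {"joy": 0.05, "sadness": 0.6, "anger": 0.1, "fear": 0.1, "neutral": 0.15}
    return {"joy": 0.4, "sadness": 0.1, "anger": 0.1, "fear": 0.1, "neutral": 0.3}

def fuse(face: Dict[str, float], text: Dict[str, float], w_face: float = 0.5) -> AffectiveEstimate:
    """Weighted combination of the two modules' normalized scores."""
    face, text = normalize(face), normalize(text)
    scores = {e: w_face * face[e] + (1 - w_face) * text[e] for e in EMOTIONS}
    return AffectiveEstimate(scores, max(scores, key=scores.get))

if __name__ == "__main__":
    estimate = fuse(facial_module(frame=None),
                    dialogue_module("I feel tired and in pain today"))
    print(estimate.dominant, estimate.scores)

Swapping the placeholder modules for real classifiers, or adding further modality modules, would not change the fusion step, which is the point of a modular design.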
2017
Gentile, Vito; Milazzo, Fabrizio; Sorce, Salvatore; Gentile, Antonio; Augello, Agnese; Pilato, Giovanni
Body Gestures and Spoken Sentences: A Novel Approach for Revealing User's Emotions Proceedings Article
In: Proceedings - IEEE 11th International Conference on Semantic Computing, ICSC 2017, pp. 69–72, Institute of Electrical and Electronics Engineers Inc., 2017, ISBN: 978-1-5090-4896-0.
Abstract | Links | BibTeX | Tags: Emotion Analysis, Emotion Recognition, Facial Expressions, Gestural user interfaces, Human computer interaction, Semantic Computing, Sentiment Analysis
@inproceedings{gentileBodyGesturesSpoken2017,
title = {Body Gestures and Spoken Sentences: A Novel Approach for Revealing User's Emotions},
author = {Vito Gentile and Fabrizio Milazzo and Salvatore Sorce and Antonio Gentile and Agnese Augello and Giovanni Pilato},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85018255013&doi=10.1109%2fICSC.2017.14&partnerID=40&md5=23d8bb016146afe5e384b12d84f3fb85},
doi = {10.1109/ICSC.2017.14},
isbn = {978-1-5090-4896-0},
year = {2017},
date = {2017-01-01},
booktitle = {Proceedings - IEEE 11th International Conference on Semantic Computing, ICSC 2017},
pages = {69--72},
publisher = {Institute of Electrical and Electronics Engineers Inc.},
abstract = {In the last decade, there has been a growing interest in emotion analysis research, which has been applied in several areas of computer science. Many authors have contributed to the development of emotion recognition algorithms, considering textual or non verbal data as input, such as facial expressions, gestures or, in the case of multi-modal emotion recognition, a combination of them. In this paper, we describe a method to detect emotions from gestures using the skeletal data obtained from Kinect-like devices as input, as well as a textual description of their meaning. The experimental results show that the correlation existing between body movements and spoken user sentence(s) can be used to reveal user's emotions from gestures. © 2017 IEEE.},
keywords = {Emotion Analysis, Emotion Recognition, Facial Expressions, Gestural user interfaces, Human computer interaction, Semantic Computing, Sentiment Analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
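The following Python fragment is a rough, hypothetical illustration of the idea summarized in the abstract above: a motion feature computed from Kinect-like skeletal frames is combined with the valence of the accompanying spoken sentence to pick an emotion label. The joint format, features, lexicon and thresholds are placeholders, not the method evaluated in the paper.

# Hypothetical sketch: combine skeletal motion energy with the valence of the
# spoken sentence to guess an emotion label. All features and thresholds are
# illustrative placeholders.
import math
from typing import Dict, List, Tuple

Joint = Tuple[float, float, float]   # (x, y, z) of one tracked joint
Frame = Dict[str, Joint]             # joint name -> position, one skeleton frame

def movement_energy(frames: List[Frame]) -> float:
    """Average per-frame displacement of all joints: a crude proxy for how
    agitated or calm the gesture is."""
    if len(frames) < 2:
        return 0.0
    total = 0.0
    for prev, curr in zip(frames, frames[1:]):
        for name, (x, y, z) in curr.items():
            px, py, pz = prev.get(name, (x, y, z))
            total += math.dist((x, y, z), (px, py, pz))
    return total / (len(frames) - 1)

def sentence_valence(sentence: str) -> float:
    """Toy lexicon-based valence in [-1, 1]; a real system would use a proper
    sentiment or emotion classifier over the spoken sentence."""
    positive = {"great", "happy", "good", "love"}
    negative = {"angry", "bad", "hate", "scared"}
    words = sentence.lower().split()
    score = sum(w in positive for w in words) - sum(w in negative for w in words)
    return max(-1.0, min(1.0, score / max(len(words), 1) * 5))

def classify(frames: List[Frame], sentence: str) -> str:
    """Illustrative rule: high motion + negative speech -> anger; high motion +
    positive speech -> joy; low motion + negative speech -> sadness; else neutral."""
    energy, valence = movement_energy(frames), sentence_valence(sentence)
    if energy > 0.05:
        return "anger" if valence < 0 else "joy"
    return "sadness" if valence < 0 else "neutral"

if __name__ == "__main__":
    frames = [{"hand_right": (0.0, 1.0, 0.5)}, {"hand_right": (0.2, 1.1, 0.5)}]
    print(classify(frames, "I hate waiting, this is so bad"))

The sketch only shows why pairing the two modalities helps: the same energetic gesture is read differently depending on what is being said at the same time.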