AHCI RESEARCH GROUP
Publications
Papers published in international journals, conference and workshop proceedings, and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to filter the papers by research topic.
You can expand the Abstract, Links, and BibTeX record for each paper.
2022
Augello, Agnese; Di Bella, Giulia; Infantino, Ignazio; Pilato, Giovanni; Vitale, Gianluigi
Multimodal Mood Recognition for Assistive Scenarios Proceedings Article
In: Samsonovich, A. V.; Ramos Corchado, F. F. (Eds.): Procedia Computer Science, pp. 510–517, Elsevier B.V., 2022.
Abstract | Links | BibTeX | Tags: Assistive Robots, Emotion Analysis, Emotion Recognition, Mood
@inproceedings{augello_multimodal_2022,
title = {Multimodal Mood Recognition for Assistive Scenarios},
author = {Agnese Augello and Giulia Di Bella and Ignazio Infantino and Giovanni Pilato and Gianluigi Vitale},
editor = {A. V. Samsonovich and F. F. Ramos Corchado},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85146111070&doi=10.1016%2fj.procs.2022.11.098&partnerID=40&md5=5f02507ae4efe3f3fe476ce3f7dbc63d},
doi = {10.1016/j.procs.2022.11.098},
year = {2022},
date = {2022-01-01},
booktitle = {Procedia Computer Science},
volume = {213},
pages = {510--517},
publisher = {Elsevier B.V.},
abstract = {We illustrate a system performing multimodal human emotion detection from video input through the integration of audio emotional recognition, text emotional recognition, facial emotional recognition, and emotional recognition from a spectrogram. The outcomes of the four emotion recognition modalities are compared, and a final evaluation provides the most likely perceived emotion. The system has been designed to be easily implemented on cheap mini-computer-based boards. It is conceived to be used as an auxiliary tool in the field of telemedicine to remotely monitor the mood of patients and observe their healing process, which is closely related to their emotional condition. © 2022 The Author(s).},
keywords = {Assistive Robots, Emotion Analysis, Emotion Recognition, Mood},
pubstate = {published},
tppubtype = {inproceedings}
}
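The fusion step this abstract describes can be pictured as simple late fusion over the four recognizers. Below is a minimal Python sketch, not the authors' implementation: the emotion labels, the per-modality scores, and the plain averaging rule are all assumptions for illustration.
from collections import defaultdict

EMOTIONS = ["happy", "sad", "angry", "neutral"]  # hypothetical label set

def fuse_modalities(modality_scores):
    """Average the per-modality probability distributions and return the
    most likely emotion (a plain late-fusion baseline)."""
    totals = defaultdict(float)
    for scores in modality_scores.values():
        for emotion, p in scores.items():
            totals[emotion] += p
    n = len(modality_scores)
    return max(EMOTIONS, key=lambda e: totals[e] / n)

# Outputs of the four recognizers (audio, text, face, spectrogram) on one
# hypothetical video segment.
segment = {
    "audio":       {"happy": 0.1, "sad": 0.6, "angry": 0.1, "neutral": 0.2},
    "text":        {"happy": 0.2, "sad": 0.5, "angry": 0.1, "neutral": 0.2},
    "face":        {"happy": 0.3, "sad": 0.4, "angry": 0.1, "neutral": 0.2},
    "spectrogram": {"happy": 0.1, "sad": 0.7, "angry": 0.1, "neutral": 0.1},
}
print(fuse_modalities(segment))  # -> sad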
2019
Augello, Agnese; Infantino, Ignazio; Maniscalco, Umberto; Pilato, Giovanni; Vella, Filippo
NarRob: A Humanoid Social Storyteller with Emotional Expression Capabilities Proceedings Article
In: Samsonovich, A. V. (Ed.): Biologically Inspired Cognitive Architectures 2018, pp. 9–15, Springer International Publishing AG, Cham, Switzerland, 2019, ISBN: 978-3-319-99316-4, 978-3-319-99315-7.
Abstract | Links | BibTeX | Tags: Education, Emotion Recognition, Gestural user interfaces, Robotics, Storytelling Robots
@inproceedings{augello_narrob_2019,
title = {NarRob: A Humanoid Social Storyteller with Emotional Expression Capabilities},
author = {Agnese Augello and Ignazio Infantino and Umberto Maniscalco and Giovanni Pilato and Filippo Vella},
editor = {A. V. Samsonovich},
doi = {10.1007/978-3-319-99316-4_2},
isbn = {978-3-319-99316-4, 978-3-319-99315-7},
year = {2019},
date = {2019-01-01},
booktitle = {Biologically Inspired Cognitive Architectures 2018},
volume = {848},
pages = {9--15},
publisher = {Springer International Publishing AG},
address = {Cham, Switzerland},
series = {Advances in Intelligent Systems and Computing},
abstract = {In this paper we propose a model of a robotic storyteller, focusing on its ability to select the most appropriate gestures to accompany the story, while also trying to manifest the emotions related to the sentence that is being told. The robot is endowed with a repository of stories together with a set of gestures, inspired by those typically used by humans, that the robot learns by observation. The gestures are annotated by a number N of subjects, according to their particular meaning and considering a specific typology. They are exploited by the robot according to the story content to provide an engaging representation of the tale.},
keywords = {Education, Emotion Recognition, Gestural user interfaces, Robotics, Storytelling Robots},
pubstate = {published},
tppubtype = {inproceedings}
}
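As a rough illustration of the annotated-gesture repository described in the abstract above, the sketch below tags each gesture with annotator-assigned meanings and an emotion, then selects one to accompany the sentence being narrated. The repository contents, field names, and first-match rule are invented for illustration; the paper's annotation scheme (N annotators, gesture typologies) is richer.
from dataclasses import dataclass
from typing import Optional

@dataclass
class Gesture:
    name: str
    meanings: set  # meanings assigned by the annotators (hypothetical)
    emotion: str   # dominant emotion tag across annotators

# Hypothetical repository entries; in the paper, gestures are learned by
# observation and annotated by N subjects.
REPOSITORY = [
    Gesture("open_arms", {"welcome", "joy"}, "happiness"),
    Gesture("head_down", {"regret", "loss"}, "sadness"),
    Gesture("fist_shake", {"protest", "rage"}, "anger"),
]

def select_gesture(sentence_emotion: str) -> Optional[Gesture]:
    """Pick the first gesture whose annotated emotion matches the
    emotion conveyed by the sentence currently being narrated."""
    return next((g for g in REPOSITORY if g.emotion == sentence_emotion), None)

print(select_gesture("sadness").name)  # -> head_down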
2017
Gentile, Vito; Milazzo, Fabrizio; Sorce, Salvatore; Gentile, Antonio; Augello, Agnese; Pilato, Giovanni
Body Gestures and Spoken Sentences: A Novel Approach for Revealing User's Emotions Proceedings Article
In: Proceedings - IEEE 11th International Conference on Semantic Computing, ICSC 2017, pp. 69–72, Institute of Electrical and Electronics Engineers Inc., 2017, ISBN: 978-1-5090-4896-0.
Abstract | Links | BibTeX | Tags: Emotion Analysis, Emotion Recognition, Facial Expressions, Gestural user interfaces, Human computer interaction, Semantic Computing, Sentiment Analysis
@inproceedings{gentile_body_2017,
title = {Body Gestures and Spoken Sentences: A Novel Approach for Revealing User's Emotions},
author = {Vito Gentile and Fabrizio Milazzo and Salvatore Sorce and Antonio Gentile and Agnese Augello and Giovanni Pilato},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85018255013&doi=10.1109%2fICSC.2017.14&partnerID=40&md5=23d8bb016146afe5e384b12d84f3fb85},
doi = {10.1109/ICSC.2017.14},
isbn = {978-1-5090-4896-0},
year = {2017},
date = {2017-01-01},
booktitle = {Proceedings - IEEE 11th International Conference on Semantic Computing, ICSC 2017},
pages = {69--72},
publisher = {Institute of Electrical and Electronics Engineers Inc.},
abstract = {In the last decade, there has been a growing interest in emotion analysis research, which has been applied in several areas of computer science. Many authors have contributed to the development of emotion recognition algorithms, considering textual or non-verbal data as input, such as facial expressions, gestures or, in the case of multi-modal emotion recognition, a combination of them. In this paper, we describe a method to detect emotions from gestures using the skeletal data obtained from Kinect-like devices as input, as well as a textual description of their meaning. The experimental results show that the correlation existing between body movements and spoken user sentence(s) can be used to reveal user's emotions from gestures. © 2017 IEEE.},
keywords = {Emotion Analysis, Emotion Recognition, Facial Expressions, Gestural user interfaces, Human computer interaction, Semantic Computing, Sentiment Analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
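The idea sketched in this abstract, pairing skeletal gesture features with the meaning of the simultaneously spoken sentence, can be pictured with a toy example. The joint names, the arousal heuristic, and the quadrant mapping below are all hypothetical assumptions, not the paper's actual features or classifier.
def gesture_arousal(joints):
    """Crude arousal proxy: average hand height relative to the head
    (y-up coordinates), squashed into [0, 1]."""
    head_y = joints["head"][1]
    hands_y = (joints["hand_left"][1] + joints["hand_right"][1]) / 2
    return max(0.0, min(1.0, 0.5 + (hands_y - head_y)))

def label_emotion(arousal, text_polarity):
    """Map an (arousal, valence) pair onto four quadrant emotions."""
    if text_polarity >= 0:
        return "happy" if arousal >= 0.5 else "relaxed"
    return "angry" if arousal >= 0.5 else "sad"

# One skeletal frame (x, y, z per joint) plus the polarity of the sentence
# spoken at the same time.
frame = {
    "head": (0.0, 1.6, 0.0),
    "hand_left": (-0.3, 1.7, 0.2),
    "hand_right": (0.3, 1.8, 0.2),
}
print(label_emotion(gesture_arousal(frame), text_polarity=-0.7))  # -> angry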
Milazzo, Fabrizio; Augello, Agnese; Pilato, Giovanni; Gentile, Vito; Gentile, Antonio; Sorce, Salvatore
Exploiting Correlation between Body Gestures and Spoken Sentences for Real-Time Emotion Recognition Proceedings Article
In: ACM International Conference Proceeding Series, Association for Computing Machinery, 2017, ISBN: 978-1-4503-5237-6.
Abstract | Links | BibTeX | Tags: Emotion Recognition, Gestural user interfaces, Human computer interaction, Sentiment Analysis
@inproceedings{milazzo_exploiting_2017,
title = {Exploiting Correlation between Body Gestures and Spoken Sentences for Real-Time Emotion Recognition},
author = {Fabrizio Milazzo and Agnese Augello and Giovanni Pilato and Vito Gentile and Antonio Gentile and Salvatore Sorce},
url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85034630360&doi=10.1145%2f3125571.3125590&partnerID=40&md5=b3d73715f756aded80e6b0a330ace70a},
doi = {10.1145/3125571.3125590},
isbn = {978-1-4503-5237-6},
year = {2017},
date = {2017-01-01},
booktitle = {ACM International Conference Proceeding Series},
volume = {Part F131371},
publisher = {Association for Computing Machinery},
abstract = {Humans communicate their affective states through different media, both verbal and non-verbal, often used at the same time. The knowledge of the emotional state plays a key role in providing personalized and context-related information and services. This is the main reason why several algorithms have been proposed in the last few years for automatic emotion recognition. In this work we exploit the correlation between one's affective state and the simultaneous body expressions in terms of speech and gestures. Here we propose a system for real-time emotion recognition from gestures. In a first step, the system builds a trusted dataset of association pairs (motion data→emotion pattern), also based on textual information. Such a dataset is the ground truth for a further step, where emotion patterns can be extracted from new unclassified gestures. Experimental results demonstrate a good recognition accuracy and real-time capabilities of the proposed system. © 2017 Copyright held by the owner/author(s).},
keywords = {Emotion Recognition, Gestural user interfaces, Human computer interaction, Sentiment Analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
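The two-step pipeline in this abstract (first build a trusted dataset of motion→emotion association pairs, then classify new, unclassified gestures against it) resembles a nearest-neighbour lookup. The sketch below uses invented feature vectors and a Euclidean distance; it illustrates the idea under those assumptions and is not the paper's method.
import math

# Trusted dataset of association pairs (motion feature vector -> emotion);
# the vectors and labels are invented for illustration.
TRUSTED_PAIRS = [
    ((0.9, 0.2, 0.8), "angry"),
    ((0.1, 0.1, 0.2), "sad"),
    ((0.7, 0.9, 0.6), "happy"),
]

def classify_gesture(features):
    """Return the emotion of the nearest motion vector in the trusted
    dataset (1-nearest-neighbour lookup)."""
    nearest = min(TRUSTED_PAIRS, key=lambda pair: math.dist(pair[0], features))
    return nearest[1]

print(classify_gesture((0.8, 0.3, 0.7)))  # -> angry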