AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2022
Augello, Agnese
Unveiling the Reasoning Processes of Robots through Introspective Dialogues in a Storytelling System: A Study on the Elicited Empathy Journal Article
In: Cognitive Systems Research, vol. 73, pp. 12–20, 2022, ISSN: 1389-0417.
Abstract | Links | BibTeX | Tags: ACT-R, Anthropomorphic Robots, Cognitive Architectures, Cognitive Systems, Empathy, Gestural user interfaces, Introspective Dialogs, Personality, Reasoning, Social Agents, Social Practices, Storytelling Robots
@article{augelloUnveilingReasoningProcesses2022,
  title     = {Unveiling the Reasoning Processes of Robots through Introspective Dialogues in a Storytelling System: A Study on the Elicited Empathy},
  author    = {Augello, Agnese},
  doi       = {10.1016/j.cogsys.2021.11.006},
  issn      = {1389-0417},
  year      = {2022},
  date      = {2022-01-01},
  journal   = {Cognitive Systems Research},
  volume    = {73},
  pages     = {12--20},
  abstract  = {The work studies the empathy elicited by a storytelling system in which the characters in the story are interpreted by humanoid robots and modelled as cognitive agents. The ACT-R architecture is exploited to shape the characters' personalities and equip them with knowledge and behaviours typical of social practices. The narration is enriched with gestures and emotional expressions obtained by setting parameters that can be correlated to some emotions, such as the pitch and speech rate, the LEDs colour and the head inclination. The system has been evaluated by comparing a simple narrative modality with an enhanced one, where an introspective dialogue is adopted to explain and let transparent the internal reasoning processes of the characters. The obtained results show that storytelling affected the cognitive component of empathy, especially through the advanced narrative mode. \textcopyright{} 2022 Elsevier B.V.},
  keywords  = {ACT-R, Anthropomorphic Robots, Cognitive Architectures, Cognitive Systems, Empathy, Gestural user interfaces, Introspective Dialogs, Personality, Reasoning, Social Agents, Social Practices, Storytelling Robots},
  pubstate  = {published},
  tppubtype = {article}
}
2019
Augello, Agnese; Infantino, Ignazio; Maniscalco, Umberto; Pilato, Giovanni; Vella, Filippo
NarRob: A Humanoid Social Storyteller with Emotional Expression Capabilities Proceedings Article
In: Alexei V. Samsonovich (Ed.): Biologically Inspired Cognitive Architectures 2018, pp. 9–15, Springer International Publishing, Cham, Switzerland, 2019, ISBN: 978-3-319-99316-4, 978-3-319-99315-7.
Abstract | Links | BibTeX | Tags: Education, Emotion Recognition, Gestural user interfaces, Robotics, Storytelling Robots
@inproceedings{augelloNarRobHumanoidSocial2019,
  title     = {NarRob: A Humanoid Social Storyteller with Emotional Expression Capabilities},
  author    = {Augello, Agnese and Infantino, Ignazio and Maniscalco, Umberto and Pilato, Giovanni and Vella, Filippo},
  editor    = {Samsonovich, Alexei V.},
  doi       = {10.1007/978-3-319-99316-4_2},
  isbn      = {978-3-319-99316-4, 978-3-319-99315-7},
  year      = {2019},
  date      = {2019-01-01},
  booktitle = {Biologically Inspired Cognitive Architectures 2018},
  volume    = {848},
  pages     = {9--15},
  publisher = {Springer International Publishing},
  address   = {Cham, Switzerland},
  series    = {Advances in Intelligent Systems and Computing},
  abstract  = {In this paper we propose a model of a robotic storyteller, focusing on its abilities to select the most appropriate gestures to accompany the story, trying to manifest also emotions related to the sentence that is being told. The robot is endowed with a repository of stories together with a set of gestures, inspired by those typically used by humans, that the robot learns by observation. The gestures are annotated by a number N of subjects, according to their particular meaning and considering a specific typology. They are exploited by the robot according to the story content to provide an engaging representation of the tale.},
  keywords  = {Education, Emotion Recognition, Gestural user interfaces, Robotics, Storytelling Robots},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2017
Gentile, Vito; Milazzo, Fabrizio; Sorce, Salvatore; Gentile, Antonio; Augello, Agnese; Pilato, Giovanni
Body Gestures and Spoken Sentences: A Novel Approach for Revealing User's Emotions Proceedings Article
In: Proceedings - IEEE 11th International Conference on Semantic Computing, ICSC 2017, pp. 69–72, Institute of Electrical and Electronics Engineers Inc., 2017, ISBN: 978-1-5090-4896-0.
Abstract | Links | BibTeX | Tags: Emotion Analysis, Emotion Recognition, Facial Expressions, Gestural user interfaces, Human computer interaction, Semantic Computing, Sentiment Analysis
@inproceedings{gentileBodyGesturesSpoken2017,
  title     = {Body Gestures and Spoken Sentences: A Novel Approach for Revealing User's Emotions},
  author    = {Gentile, Vito and Milazzo, Fabrizio and Sorce, Salvatore and Gentile, Antonio and Augello, Agnese and Pilato, Giovanni},
  doi       = {10.1109/ICSC.2017.14},
  isbn      = {978-1-5090-4896-0},
  year      = {2017},
  date      = {2017-01-01},
  booktitle = {Proceedings - IEEE 11th International Conference on Semantic Computing, ICSC 2017},
  pages     = {69--72},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  abstract  = {In the last decade, there has been a growing interest in emotion analysis research, which has been applied in several areas of computer science. Many authors have contributed to the development of emotion recognition algorithms, considering textual or non verbal data as input, such as facial expressions, gestures or, in the case of multi-modal emotion recognition, a combination of them. In this paper, we describe a method to detect emotions from gestures using the skeletal data obtained from Kinect-like devices as input, as well as a textual description of their meaning. The experimental results show that the correlation existing between body movements and spoken user sentence(s) can be used to reveal user's emotions from gestures. \textcopyright{} 2017 IEEE.},
  keywords  = {Emotion Analysis, Emotion Recognition, Facial Expressions, Gestural user interfaces, Human computer interaction, Semantic Computing, Sentiment Analysis},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Milazzo, Fabrizio; Augello, Agnese; Pilato, Giovanni; Gentile, Vito; Gentile, Antonio; Sorce, Salvatore
Exploiting Correlation between Body Gestures and Spoken Sentences for Real-Time Emotion Recognition Proceedings Article
In: ACM International Conference Proceeding Series, Association for Computing Machinery, 2017, ISBN: 978-1-4503-5237-6.
Abstract | Links | BibTeX | Tags: Emotion Recognition, Gestural user interfaces, Human computer interaction, Sentiment Analysis
@inproceedings{milazzoExploitingCorrelationBody2017,
  title     = {Exploiting Correlation between Body Gestures and Spoken Sentences for Real-Time Emotion Recognition},
  author    = {Milazzo, Fabrizio and Augello, Agnese and Pilato, Giovanni and Gentile, Vito and Gentile, Antonio and Sorce, Salvatore},
  doi       = {10.1145/3125571.3125590},
  isbn      = {978-1-4503-5237-6},
  year      = {2017},
  date      = {2017-01-01},
  booktitle = {ACM International Conference Proceeding Series},
  volume    = {Part F131371},
  publisher = {Association for Computing Machinery},
  abstract  = {Humans communicate their affective states through different media, both verbal and non-verbal, often used at the same time. The knowledge of the emotional state plays a key role to provide personalized and context-related information and services. This is the main reason why several algorithms have been proposed in the last few years for the automatic emotion recognition. In this work we exploit the correlation between one's affective state and the simultaneous body expressions in terms of speech and gestures. Here we propose a system for real-time emotion recognition from gestures. In a first step, the system builds a trusted dataset of association pairs (motion data $\rightarrow$ emotion pattern), also based on textual information. Such dataset is the ground truth for a further step, where emotion patterns can be extracted from new unclassified gestures. Experimental results demonstrate a good recognition accuracy and real-time capabilities of the proposed system. \textcopyright{} 2017 Copyright held by the owner/author(s).},
  keywords  = {Emotion Recognition, Gestural user interfaces, Human computer interaction, Sentiment Analysis},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2012
Coronato, Antonio; Gallo, Luigi
Toward a Framework for Rapid Prototyping of Touchless User Interfaces Proceedings Article
In: CISIS '12: Proceedings of the International Conference on Complex, Intelligent and Software Intensive Systems, pp. 539–543, IEEE Computer Society, Palermo, Italy, 2012, ISBN: 978-0-7695-4687-2.
Abstract | Links | BibTeX | Tags: Framework, Gestural user interfaces, Natural User Interfaces, Touchless interaction
@inproceedings{coronatoFrameworkRapidPrototyping2012,
  title     = {Toward a Framework for Rapid Prototyping of Touchless User Interfaces},
  author    = {Coronato, Antonio and Gallo, Luigi},
  doi       = {10.1109/CISIS.2012.77},
  isbn      = {978-0-7695-4687-2},
  year      = {2012},
  date      = {2012-07-01},
  booktitle = {CISIS '12: Proceedings of the International Conference on Complex, Intelligent and Software Intensive Systems},
  pages     = {539--543},
  publisher = {IEEE Computer Society},
  address   = {Palermo, Italy},
  abstract  = {Recent advances in depth-sensing technologies are fostering the design of Natural User Interfaces (NUI) for use in several application domains. However, due to the complexity of existing software components and to compatibility issues, the design process remains challenging. This paper presents a framework aimed at facilitating the development of natural, touchless user interfaces. The proposed framework, which is based on the publish-subscribe paradigm, allows product and interaction designers to rapidly prototype and test their system by building upon a set of standard modules. The framework also provides the building blocks to extend the basic set of modules, easing code reuse.},
  keywords  = {Framework, Gestural user interfaces, Natural User Interfaces, Touchless interaction},
  pubstate  = {published},
  tppubtype = {inproceedings}
}