@inproceedings{2021,
  title     = {Induction of the being-seen-feeling by an embodied conversational agent in a socially interactive context},
  booktitle = {21st ACM International Conference on Intelligent Virtual Agents},
  year      = {2021},
  month     = {09/2021},
  url       = {https://hal.archives-ouvertes.fr/hal-03342893/document},
  author    = {Grondin-Verdon, Micka{\"e}lla and Younsi, Nezih and Grimaldi, Michele and Pelachaud, Catherine and Chaby, Laurence and Ca{\~n}amero, Lola}
}

@article{2013,
  title    = {Interpretation of Emotional Body Language Displayed by a Humanoid Robot: A Case Study with Children},
  journal  = {International Journal of Social Robotics},
  volume   = {5},
  year     = {2013},
  pages    = {325{\textendash}334},
  abstract = {The work reported in this paper focuses on giving humanoid robots the capacity to express emotions with their body. Previous results show that adults are able to interpret different key poses displayed by a humanoid robot, and also that changing the head position affects the expressiveness of the key poses in a consistent way. Moving the head down leads to decreased arousal (the level of energy) and valence (positive or negative emotion), whereas moving the head up produces an increase along these dimensions. Hence, changing the head position during an interaction should send intuitive signals. The study reported in this paper tested children{\textquoteright}s ability to recognize the emotional body language displayed by a humanoid robot. The results suggest that body postures and head position can be used to convey emotions during child-robot interaction.},
  keywords = {emotion, emotional body language, perception, social robotics},
  issn     = {1875-4791},
  doi      = {10.1007/s12369-013-0193-z},
  url      = {https://link.springer.com/article/10.1007/s12369-013-0193-z},
  author   = {Beck, Aryel and Ca{\~n}amero, Lola and Hiolle, Antoine and Damiano, Luisa and Cosi, Piero and Tesser, Fabio and Sommavilla, Giacomo}
}

@inproceedings{2010,
  title     = {Interpretation of Emotional Body Language Displayed by Robots},
  booktitle = {Proc. 3rd International Workshop on Affective Interaction in Natural Environments (AFFINE{\textquoteright}10)},
  year      = {2010},
  pages     = {37{\textendash}42},
  publisher = {ACM},
  address   = {Firenze, Italy},
  abstract  = {In order for robots to be socially accepted and to generate empathy, they must display emotions. For robots such as Nao, body language is the best medium available, as they do not have the ability to display facial expressions. Displaying emotional body language that can be interpreted whilst interacting with the robot should greatly improve its acceptance. This research investigates the creation of an "Affect Space" for the generation of emotional body language that could be displayed by robots. An Affect Space is generated by "blending" (i.e., interpolating between) different emotional expressions to create new ones. An Affect Space for body language based on the Circumplex Model of emotions has been created. The experiment reported in this paper investigated the perception of specific key poses from the Affect Space. The results suggest that this Affect Space for body expressions can be used to improve the expressiveness of humanoid robots. In addition, early results of a pilot study are described.
    The pilot study revealed that context helps human subjects improve their recognition rate during a human-robot imitation game, and that this improved recognition in turn leads to better outcomes of the interactions.},
  isbn      = {978-1-4503-0170-1},
  doi       = {10.1145/1877826.1877837},
  author    = {Beck, Aryel and Hiolle, Antoine and Mazel, Alexandre and Ca{\~n}amero, Lola}
}

@inproceedings{2009a,
  title     = {The Importance of the Body in Affect-Modulated Action Selection: A Case Study Comparing Proximal Versus Distal Perception in a Prey-Predator Scenario},
  booktitle = {Proc. 3rd Intl. Conference on Affective Computing and Intelligent Interaction (ACII 2009)},
  year      = {2009},
  month     = {09/2009},
  pages     = {1{\textendash}6},
  publisher = {IEEE Press},
  address   = {Amsterdam, The Netherlands},
  abstract  = {In the context of the animat approach, we investigate the effect of an emotion-like hormonal mechanism, acting as a modulator of perception and as a second-order controller for an underlying motivation-based action selection architecture, on brain-body-environment interactions within a prey-predator scenario. We are particularly interested in the effects that affective modulation of different perceptual capabilities has on the dynamics of interactions between predator and prey, as part of a broader study of the adaptive value of emotional states such as "fear" and "aggression" in the context of action selection. In this paper we present experiments in which we modulated the architecture of a prey robot using two different types of sensory capabilities, proximal and distal, effectively creating combinations of different prey "brains" and "bodies".},
  issn      = {2156-8103},
  doi       = {10.1109/ACII.2009.5349596},
  author    = {O{\textquoteright}Bryne, Claire and Ca{\~n}amero, Lola and Murray, John C.}
}

@inproceedings{2009b,
  title     = {The Influence of Social Interaction on the Perception of Emotional Expression: A Case Study with a Robot Head},
  booktitle = {Advances in Robotics: Proc. FIRA RoboWorld Congress 2009},
  series    = {Lecture Notes in Computer Science},
  volume    = {5744},
  year      = {2009},
  month     = {08/2009},
  pages     = {63{\textendash}72},
  publisher = {Springer Berlin Heidelberg},
  address   = {Incheon, Korea},
  abstract  = {In this paper we focus primarily on the influence that socio-emotional interaction has on the perception of emotional expression by a robot. We also investigate and discuss the importance of emotion expression in socially interactive situations involving human-robot interaction (HRI), and show the importance of utilising emotion expression when dealing with interactive robots that are to learn and develop in socially situated environments. We discuss early expressional development and the function of emotion in communication in humans, and how this can improve HRI communications. Finally, we provide experimental results showing how emotion-rich interaction via emotion expression can affect the HRI process by providing additional information.},
  isbn      = {978-3-642-03983-6},
  doi       = {10.1007/978-3-642-03983-6_10},
  url       = {https://link.springer.com/chapter/10.1007/978-3-642-03983-6_10},
  author    = {Murray, John C. and Ca{\~n}amero, Lola and Bard, Kim A. and
               Davila Ross, Marina and Thorsteinsson, Kate},
  editor    = {Kim, Jong-Hwan and Ge, Shuzhi Sam and Vadakkepat, Prahlad and Jesse, Norbert and Al Mamun, Abdullah and Puthusserypady K, Sadasivan and R{\"u}ckert, Ulrich and Sitte, Joaquin and Witkowski, Ulf and Nakatsu, Ryohei and Br{\"a}unl, Thomas and Baltes, Jacky and Anderson, John and Wong, Ching-Chang and Verner, Igor and Ahlgren, David}
}

@inproceedings{2005,
  title     = {Introducing Neuromodulation to a Braitenberg Vehicle},
  booktitle = {Proc. 2005 IEEE Int. Conf. on Robotics and Automation: Robots get Closer to Humans (ICRA{\textquoteright}05)},
  year      = {2005},
  month     = {04/2005},
  pages     = {4199{\textendash}4204},
  publisher = {IEEE Press},
  address   = {Barcelona, Spain},
  abstract  = {Artificial neural networks are often used as the control systems for mobile robots. However, although these models usually claim inspiration from biology, they often lack an analogue of the biological phenomenon called neuromodulation. In this paper, we describe our initial work exploring a simple model of neuromodulation, used to provide a mobile robot with foraging behaviour.},
  isbn      = {0-7803-8914-X},
  issn      = {1050-4729},
  doi       = {10.1109/ROBOT.2005.1570763},
  url       = {http://ieeexplore.ieee.org/abstract/document/1570763/},
  author    = {French, Richard L. B. and Ca{\~n}amero, Lola}
}

@article{2004,
  title     = {Intelligenza artificiale in medicina: progetto di una piattaforma mobile inserita in un ambiente intelligente per l{\textquoteright}assistenza ai disabili e agli anziani},
  journal   = {Recenti Progressi in Medicina},
  volume    = {95},
  year      = {2004},
  pages     = {190{\textendash}195},
  publisher = {Pensiero scientifico},
  note      = {In Italian. English title: Artificial intelligence in medicine: design of a mobile platform embedded in an intelligent environment to assist disabled and elderly people.},
  abstract  = {A project based on the integration of new technologies and artificial intelligence to develop a device {\textendash} e-tool {\textendash} for disabled patients and elderly people is presented. A mobile platform embedded in intelligent environments (skilled-care facilities and home care), controlled and managed through a multi-level architecture, is proposed to support both patients and caregivers and to increase self-dependency in activities of daily living.},
  issn      = {0034-1193},
  doi       = {10.1701/39.314},
  author    = {Cort{\'e}s, Ulises and Annicchiarico, Roberta and Campana, Fabio and V{\'a}zquez-Salceda, Javier and Urdiales, Cristina and Ca{\~n}amero, Lola and L{\'o}pez, Maite and S{\`a}nchez-Marr{\`e}, Miquel and Di Vincenzo, Sarah and Caltagirone, Carlo}
}

@article{2001,
  title     = {I Show You How I Like You{\textemdash}Can You Read it in My Face?},
  journal   = {IEEE Transactions on Systems, Man and Cybernetics, Part A: Systems and Humans},
  volume    = {31},
  year      = {2001},
  month     = {09/2001},
  pages     = {454{\textendash}459},
  publisher = {IEEE},
  abstract  = {We report work on a LEGO robot that displays different emotional expressions in response to physical stimulation, for the purpose of social interaction with humans.
    This is a first step toward our longer-term goal of exploring believable emotional exchanges to achieve plausible interaction with a simple robot. Drawing inspiration from theories of human basic emotions, we have implemented several prototypical expressions in the robot{\textquoteright}s caricaturized face and conducted experiments to assess the recognizability of these expressions.},
  issn      = {1083-4427},
  doi       = {10.1109/3468.952719},
  url       = {http://ieeexplore.ieee.org/document/952719/},
  author    = {Ca{\~n}amero, Lola D. and Fredslund, Jakob}
}

@techreport{2000,
  title       = {I Show You How I Like You: Human-Robot Interaction through Emotional Expression and Tactile Stimulation},
  year        = {2000},
  institution = {University of Aarhus, Denmark},
  abstract    = {We report work on a LEGO robot capable of displaying several emotional expressions in response to physical contact. Our motivation has been to explore believable emotional exchanges to achieve plausible interaction with a simple robot. We have worked toward this goal in two ways. First, acknowledging the importance of physical manipulation in children{\textquoteright}s interactions, interaction with the robot is through tactile stimulation; the various kinds of stimulation that can elicit the robot{\textquoteright}s emotions are grounded in a model of emotion activation based on different stimulation patterns. Second, emotional states need to be clearly conveyed. We have drawn inspiration from theories of human basic emotions with associated universal facial expressions, which we have implemented in a caricaturized face. We have conducted experiments with both children and adults to assess the recognizability of these expressions.},
  url         = {http://ojs.statsbiblioteket.dk/index.php/daimipb/article/view/7078},
  author      = {Ca{\~n}amero, Lola D. and Fredslund, Jakob}
}

@inproceedings{1999,
  title     = {Imitating Human Performances to Automatically Generate Expressive Jazz Ballads},
  booktitle = {Proc. AISB{\textquoteright}99 Symposium on Imitation in Animals and Artifacts},
  year      = {1999},
  pages     = {115{\textendash}120},
  publisher = {AISB},
  address   = {Edinburgh, Scotland},
  abstract  = {One of the main problems with the automatic generation of expressive musical performances is to grasp the way in which human performers use musical knowledge that is not explicitly noted in musical scores. Moreover, this knowledge is tacit and difficult to verbalize, and must therefore be acquired through a process of observation, imitation, and experimentation. For this reason, AI approaches based on declarative knowledge representations have serious limitations. An alternative approach is to directly use the implicit knowledge contained in examples of recorded human performances. In this paper, we describe a case-based reasoning system that generates expressive musical performances by imitating examples of expressive human performances.},
  author    = {Ca{\~n}amero, D. and Arcos, Josep Llu{\'\i}s and L{\'o}pez de M{\'a}ntaras, Ramon}
}

@inproceedings{1998,
  title     = {Issues in the Design of Emotional Agents},
  booktitle = {Emotional and Intelligent: The Tangled Knot of Cognition. Papers from the 1998 AAAI Fall Symposium},
  year      = {1998},
  pages     = {49{\textendash}54},
  publisher = {AAAI Press},
  author    = {Ca{\~n}amero, D.},
  editor    = {Ca{\~n}amero, D.}
}