Back
Despite the importance of sensitive skin for living creatures, most robots can feel contact on only a tiny fraction of their exterior, if at all. Furthermore, typical robot reactions to touch are limited to event-based acknowledgments, lacking perceptual richness, lifelike positive/negative responses, and temporal dynamics. We address these gaps by introducing a practical full-body tactile-perception system for social robots, turning a NAO robot into the Haptic Empathetic Robot Animal (HERA). The sixteen main regions of the robot's body are instrumented with soft resistive tactile sensors covered by a tailored koala suit. Windows of each time-varying sensor output are continually classified into five gestures at two intensities via a two-stage machine-learning model. On challenging testing data containing simultaneous contacts, touch detection achieves an F1 score of 0.773, and gesture recognition achieves 52.2% accuracy (5.2 times chance); considering the temporal, spatial, and semantic adjacency of the applied touches increases these metrics to 0.896 and 86.6%, respectively. In turn, each detected contact drives a real-time emotion model that represents the robot's affective state as a second-order dynamic system analogous to a mass-spring-damper. This model's parameters control the robot's disposition, stoicism, and calmness. We explain the connections between HERA's hardware and software subsystems and demonstrate their combined ability to create an affective robot that feels both touch and emotion.
@article{Burns25-TAC-Emotion,
  author        = {Burns, Rachael Bevill and Richardson, Benjamin A. and Klingenberg, Jack and Kuchenbecker, Katherine J.},
  title         = {Creating an Affective Robot That Feels Both Touch and Emotion},
  journal       = {IEEE Transactions on Affective Computing},
  year          = {2025},
  month         = dec,
  pages         = {1--18},
  doi           = {10.1109/TAFFC.2025.3647658},
  note          = {Rachael Bevill Burns and Benjamin A. Richardson contributed equally to this publication},
  abstract      = {Despite the importance of sensitive skin for living creatures, most robots can feel contact on only a tiny fraction of their exterior, if at all. Furthermore, typical robot reactions to touch are limited to event-based acknowledgments, lacking perceptual richness, lifelike positive/negative responses, and temporal dynamics. We address these gaps by introducing a practical full-body tactile-perception system for social robots, turning a NAO robot into the Haptic Empathetic Robot Animal (HERA). The sixteen main regions of the robot's body are instrumented with soft resistive tactile sensors covered by a tailored koala suit. Windows of each time-varying sensor output are continually classified into five gestures at two intensities via a two-stage machine-learning model. On challenging testing data containing simultaneous contacts, touch detection achieves an F1 score of 0.773, and gesture recognition achieves 52.2\% accuracy (5.2 times chance); considering the temporal, spatial, and semantic adjacency of the applied touches increases these metrics to 0.896 and 86.6\%, respectively. In turn, each detected contact drives a real-time emotion model that represents the robot's affective state as a second-order dynamic system analogous to a mass-spring-damper. This model's parameters control the robot's disposition, stoicism, and calmness. We explain the connections between HERA's hardware and software subsystems and demonstrate their combined ability to create an affective robot that feels both touch and emotion.},
  month_numeric = {12},
}
More information