@inproceedings{ebb35ecfa7ba49ba827ed154418304fb,
title = "Affective human-robotic interaction",
abstract = "Entertainment robots are becoming commonplace in the home. Users are less fearful of interacting with robotic systems however these interactions are often limited to performing pre-recording sequences of actions. The next generation of consumer-level entertainment robots should offer more natural interfacing and more engaging interaction. This paper reports on the development and evaluation of a consumer-level robotic dog with acoustic emotion recognition capabilities. The dog can recognise the emotional state of it's owner from affective cues in the owner's speech and respond with appropriate actions. The evaluation study shows that users can recognise the new robotic dog to be emotionally intelligent and report that this makes the dog appear more 'alive'. {\textcopyright} 2008 Springer-Verlag Berlin Heidelberg.",
keywords = "Acoustic emotion recognition, Affective computing, Entertainment Robots, Human-Robotic interaction, Sony AIBO",
author = "Christian Jones and Andrew Deeming",
year = "2008",
doi = "10.1007/978-3-540-85099-1_15",
language = "English",
isbn = "3540850988",
volume = "4868 LNCS",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
pages = "175--185",
booktitle = "Affect and Emotion in Human-Computer Interaction - From Theory to Applications",
}