@inproceedings{14e556e8a2fe416095b1f73610afd030,
title = "The {Emotographic} Iceberg: Modelling Deep Emotional Affects Utilizing Intelligent Assistants and the {IoT}",
abstract = "Ninety percent of an iceberg is said to reside below the surface, in the hidden depths of the water, leaving only ten percent to be easily observed. In this paper the authors posit that many human emotion indicators emulate this trait, residing within the inferential data from interactions with popular IoT devices and applications. The visible 'tip of the iceberg' encapsulates the most widely studied 'tells' of emotion in the form of facial analysis, natural language processing and voice analysis. These provide a discrete frozen snapshot of a person's emotional disposition. This paper presents the hypothesis that below the surface lies a largely untapped, vast resource of submerged data that may be used to infer the emotional state of an individual. The phenomenon of the Internet of Things has cultivated a societal shift where sensors and applications gather data relating to every facet of daily life. This data is centralized by hub devices such as Voice Command Devices and accessible via Intelligent Assistants such as the Amazon Echo and Alexa. Emotographic Modelling is a new concept rendering how human emotional state may be gleaned from the raft of digital indicators available from these hubs. The 'Emotographic' classifications generated are constituted by study of the statistical data relating to digital emotion indicators. By utilizing the IoT, the Cloud and Machine Learning, the inferential depths of the iceberg may be explored to provide insight into sleep, diet, exercise and other routines and habits. The complex 'hidden' portion of the Emotographic Iceberg may reveal patterns that indicate emotion over a continuous timescale. Changes in these patterns may allow for a more sagacious comprehension of an individual's state of mind for healthcare clinicians and marketers. 
Preliminary testing is outlined in which the authors demonstrate how the emotion of sadness may be inferred from a range of questions asked to an IoT connected Amazon Echo Voice Command Device.",
keywords = "Emotions, Emotographics, affective-computing, amazon-echo, depression",
author = "Furey, Eoghan and Blue, Juanita",
note = "Publisher Copyright: {\textcopyright} 2019 IEEE.; 19th International Conference on Computational Science and Its Applications, ICCSA 2019 ; Conference date: 30-06-2019 Through 03-07-2019",
year = "2019",
month = jul,
doi = "10.1109/ICCSA.2019.00037",
language = "English",
series = "Proceedings - 2019 19th International Conference on Computational Science and Its Applications, ICCSA 2019",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "175--180",
editor = "Misra, Sanjay and Gervasi, Osvaldo and Murgante, Beniamino and Stankova, Elena and Korkhov, Vladimir and Torre, Carmelo and Rocha, {Ana Maria A. C.} and Taniar, David and Apduhan, {Bernady O.} and Tarantino, Eufemia",
booktitle = "Proceedings - 2019 19th International Conference on Computational Science and Its Applications, ICCSA 2019",
}