BibTeX
@inproceedings{lerch-etal-2024-emolis,
title = "{EMOLIS} App and Dataset to Find Emotionally Close Cartoons",
author = {Lerch, So{\"e}lie and
Bellot, Patrice and
Murisasco, Elisabeth and
Bruno, Emmanuel},
editor = "Calzolari, Nicoletta and
Kan, Min-Yen and
Hoste, Veronique and
Lenci, Alessandro and
Sakti, Sakriani and
Xue, Nianwen",
booktitle = "Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)",
month = may,
year = "2024",
address = "Torino, Italia",
publisher = "ELRA and ICCL",
url = "https://aclanthology.org/2024.lrec-main.502",
pages = "5654--5659",
abstract = "We propose EMOLIS Dataset that contains annotated emotional transcripts of scenes from Walt Disney cartoons at the same time as physiological signals from spectators (breathing, ECG, eye movements). The dataset is used in EMOLIS App, our second proposal. EMOLIS App allows to display the identified emotions while a video is playing and suggest emotionally comparable videos. We propose to estimate an emotional distance between videos using multimodal neural representations (text, audio, video) that also combine physiological signals. This enables personalized results that can be used for cognitive therapies focusing on awareness of felt emotions. The dataset is designed to be suitable for all audiences and autistic people who have difficulties to recognize and express emotions.",
}
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="lerch-etal-2024-emolis">
<titleInfo>
<title>EMOLIS App and Dataset to Find Emotionally Close Cartoons</title>
</titleInfo>
<name type="personal">
<namePart type="given">Soëlie</namePart>
<namePart type="family">Lerch</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Patrice</namePart>
<namePart type="family">Bellot</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Elisabeth</namePart>
<namePart type="family">Murisasco</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Emmanuel</namePart>
<namePart type="family">Bruno</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2024-05</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Nicoletta</namePart>
<namePart type="family">Calzolari</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Min-Yen</namePart>
<namePart type="family">Kan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Veronique</namePart>
<namePart type="family">Hoste</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alessandro</namePart>
<namePart type="family">Lenci</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sakriani</namePart>
<namePart type="family">Sakti</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nianwen</namePart>
<namePart type="family">Xue</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>ELRA and ICCL</publisher>
<place>
<placeTerm type="text">Torino, Italia</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
  <abstract>We propose the EMOLIS Dataset, which contains emotion-annotated transcripts of scenes from Walt Disney cartoons together with physiological signals from spectators (breathing, ECG, eye movements). The dataset is used in the EMOLIS App, our second proposal. The EMOLIS App displays the identified emotions while a video is playing and suggests emotionally comparable videos. We propose to estimate an emotional distance between videos using multimodal neural representations (text, audio, video) that also combine physiological signals. This enables personalized results that can be used for cognitive therapies focusing on awareness of felt emotions. The dataset is designed to be suitable for all audiences, including autistic people, who have difficulty recognizing and expressing emotions.</abstract>
<identifier type="citekey">lerch-etal-2024-emolis</identifier>
<location>
<url>https://aclanthology.org/2024.lrec-main.502</url>
</location>
<part>
<date>2024-05</date>
<extent unit="page">
<start>5654</start>
<end>5659</end>
</extent>
</part>
</mods>
</modsCollection>
Endnote
%0 Conference Proceedings
%T EMOLIS App and Dataset to Find Emotionally Close Cartoons
%A Lerch, Soëlie
%A Bellot, Patrice
%A Murisasco, Elisabeth
%A Bruno, Emmanuel
%Y Calzolari, Nicoletta
%Y Kan, Min-Yen
%Y Hoste, Veronique
%Y Lenci, Alessandro
%Y Sakti, Sakriani
%Y Xue, Nianwen
%S Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)
%D 2024
%8 May
%I ELRA and ICCL
%C Torino, Italia
%F lerch-etal-2024-emolis
%X We propose the EMOLIS Dataset, which contains emotion-annotated transcripts of scenes from Walt Disney cartoons together with physiological signals from spectators (breathing, ECG, eye movements). The dataset is used in the EMOLIS App, our second proposal. The EMOLIS App displays the identified emotions while a video is playing and suggests emotionally comparable videos. We propose to estimate an emotional distance between videos using multimodal neural representations (text, audio, video) that also combine physiological signals. This enables personalized results that can be used for cognitive therapies focusing on awareness of felt emotions. The dataset is designed to be suitable for all audiences, including autistic people, who have difficulty recognizing and expressing emotions.
%U https://aclanthology.org/2024.lrec-main.502
%P 5654-5659
Markdown (Informal)
[EMOLIS App and Dataset to Find Emotionally Close Cartoons](https://aclanthology.org/2024.lrec-main.502) (Lerch et al., LREC-COLING 2024)
ACL
Soëlie Lerch, Patrice Bellot, Elisabeth Murisasco, and Emmanuel Bruno. 2024. EMOLIS App and Dataset to Find Emotionally Close Cartoons. In Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024), pages 5654–5659, Torino, Italia. ELRA and ICCL.
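
For readers who want to experiment with the retrieval idea described in the abstract, the sketch below shows one minimal way to compute an emotional distance between videos from multimodal representations. It is an illustration only: the embedding dimensions, the L2-normalized concatenation fusion, and the cosine distance are assumptions made for this sketch, not the fusion or metric published in the paper, and random vectors stand in for learned text/audio/video/physiological embeddings.

import numpy as np

def fuse_modalities(text_emb, audio_emb, video_emb, physio_emb):
    """Fuse per-modality embeddings into one vector by L2-normalizing
    each modality and concatenating them (a simple fusion baseline;
    the paper's actual fusion strategy may differ)."""
    parts = [text_emb, audio_emb, video_emb, physio_emb]
    normed = [p / (np.linalg.norm(p) + 1e-8) for p in parts]
    return np.concatenate(normed)

def emotional_distance(repr_a, repr_b):
    """Cosine distance between two fused video representations
    (0 = identical direction, 2 = opposite)."""
    cos = np.dot(repr_a, repr_b) / (
        np.linalg.norm(repr_a) * np.linalg.norm(repr_b) + 1e-8
    )
    return 1.0 - cos

def rank_by_emotional_closeness(query_repr, candidate_reprs):
    """Return candidate indices sorted from emotionally closest to farthest."""
    dists = [emotional_distance(query_repr, c) for c in candidate_reprs]
    return sorted(range(len(dists)), key=lambda i: dists[i])

# Toy usage: random vectors stand in for learned embeddings.
rng = np.random.default_rng(0)

def random_video_repr():
    return fuse_modalities(rng.normal(size=128),  # text embedding
                           rng.normal(size=64),   # audio embedding
                           rng.normal(size=256),  # video embedding
                           rng.normal(size=32))   # physiological embedding

query = random_video_repr()
candidates = [random_video_repr() for _ in range(5)]
print(rank_by_emotional_closeness(query, candidates))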