@inproceedings{kim-etal-2018-smilee,
  title     = {{SMILEE}: Symmetric Multi-modal Interactions with Language-gesture Enabled ({AI}) Embodiment},
  author    = {Kim, Sujeong and
               Salter, David and
               DeLuccia, Luke and
               Son, Kilho and
               Amer, Mohamed R. and
               Tamrakar, Amir},
  editor    = {Liu, Yang and
               Paek, Tim and
               Patwardhan, Manasi},
  booktitle = {Proceedings of the 2018 Conference of the North {A}merican Chapter of the Association for Computational Linguistics: Demonstrations},
  month     = jun,
  year      = {2018},
  address   = {New Orleans, Louisiana},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/N18-5018},
  doi       = {10.18653/v1/N18-5018},
  pages     = {86--90},
  abstract  = {We demonstrate an intelligent conversational agent system designed for advancing human-machine collaborative tasks. The agent is able to interpret a user{'}s communicative intent from both their verbal utterances and non-verbal behaviors, such as gestures. The agent is also itself able to communicate both with natural language and gestures, through its embodiment as an avatar thus facilitating natural symmetric multi-modal interactions. We demonstrate two intelligent agents with specialized skills in the Blocks World as use-cases of our system.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kim-etal-2018-smilee">
<titleInfo>
<title>SMILEE: Symmetric Multi-modal Interactions with Language-gesture Enabled (AI) Embodiment</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sujeong</namePart>
<namePart type="family">Kim</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">David</namePart>
<namePart type="family">Salter</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Luke</namePart>
<namePart type="family">DeLuccia</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Kilho</namePart>
<namePart type="family">Son</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mohamed</namePart>
<namePart type="given">R.</namePart>
<namePart type="family">Amer</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Amir</namePart>
<namePart type="family">Tamrakar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Demonstrations</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tim</namePart>
<namePart type="family">Paek</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Manasi</namePart>
<namePart type="family">Patwardhan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">New Orleans, Louisiana</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We demonstrate an intelligent conversational agent system designed for advancing human-machine collaborative tasks. The agent is able to interpret a user’s communicative intent from both their verbal utterances and non-verbal behaviors, such as gestures. The agent is also itself able to communicate both with natural language and gestures, through its embodiment as an avatar thus facilitating natural symmetric multi-modal interactions. We demonstrate two intelligent agents with specialized skills in the Blocks World as use-cases of our system.</abstract>
<identifier type="citekey">kim-etal-2018-smilee</identifier>
<identifier type="doi">10.18653/v1/N18-5018</identifier>
<location>
<url>https://aclanthology.org/N18-5018</url>
</location>
<part>
<date>2018-06</date>
<extent unit="page">
<start>86</start>
<end>90</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T SMILEE: Symmetric Multi-modal Interactions with Language-gesture Enabled (AI) Embodiment
%A Kim, Sujeong
%A Salter, David
%A DeLuccia, Luke
%A Son, Kilho
%A Amer, Mohamed R.
%A Tamrakar, Amir
%Y Liu, Yang
%Y Paek, Tim
%Y Patwardhan, Manasi
%S Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Demonstrations
%D 2018
%8 June
%I Association for Computational Linguistics
%C New Orleans, Louisiana
%F kim-etal-2018-smilee
%X We demonstrate an intelligent conversational agent system designed for advancing human-machine collaborative tasks. The agent is able to interpret a user’s communicative intent from both their verbal utterances and non-verbal behaviors, such as gestures. The agent is also itself able to communicate both with natural language and gestures, through its embodiment as an avatar thus facilitating natural symmetric multi-modal interactions. We demonstrate two intelligent agents with specialized skills in the Blocks World as use-cases of our system.
%R 10.18653/v1/N18-5018
%U https://aclanthology.org/N18-5018
%U https://doi.org/10.18653/v1/N18-5018
%P 86-90
Markdown (Informal)
[SMILEE: Symmetric Multi-modal Interactions with Language-gesture Enabled (AI) Embodiment](https://aclanthology.org/N18-5018) (Kim et al., NAACL 2018)
ACL