@inproceedings{saffar-mehrjardi-etal-2019-self,
title = "Self-Attentional Models Application in Task-Oriented Dialogue Generation Systems",
author = "Saffar Mehrjardi, Mansour and
Trabelsi, Amine and
Zaiane, Osmar R.",
editor = "Mitkov, Ruslan and
Angelova, Galia",
booktitle = "Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019)",
month = sep,
year = "2019",
address = "Varna, Bulgaria",
publisher = "INCOMA Ltd.",
url = "https://aclanthology.org/R19-1119",
doi = "10.26615/978-954-452-056-4_119",
pages = "1031--1040",
abstract = "Self-attentional models are a new paradigm for sequence modelling tasks which differ from common sequence modelling methods, such as recurrence-based and convolution-based sequence learning, in the way that their architecture is only based on the attention mechanism. Self-attentional models have been used in the creation of the state-of-the-art models in many NLP task such as neural machine translation, but their usage has not been explored for the task of training end-to-end task-oriented dialogue generation systems yet. In this study, we apply these models on the DSTC2 dataset for training task-oriented chatbots. Our finding shows that self-attentional models can be exploited to create end-to-end task-oriented chatbots which not only achieve higher evaluation scores compared to recurrence-based models, but also do so more efficiently.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="saffar-mehrjardi-etal-2019-self">
    <titleInfo>
      <title>Self-Attentional Models Application in Task-Oriented Dialogue Generation Systems</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Mansour</namePart>
      <namePart type="family">Saffar Mehrjardi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Amine</namePart>
      <namePart type="family">Trabelsi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Osmar</namePart>
      <namePart type="given">R</namePart>
      <namePart type="family">Zaiane</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2019-09</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019)</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Ruslan</namePart>
        <namePart type="family">Mitkov</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Galia</namePart>
        <namePart type="family">Angelova</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>INCOMA Ltd.</publisher>
        <place>
          <placeTerm type="text">Varna, Bulgaria</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Self-attentional models are a new paradigm for sequence modelling tasks; they differ from common sequence modelling methods, such as recurrence-based and convolution-based sequence learning, in that their architecture relies solely on the attention mechanism. Self-attentional models have been used to build state-of-the-art models for many NLP tasks, such as neural machine translation, but their use for training end-to-end task-oriented dialogue generation systems has not yet been explored. In this study, we apply these models to the DSTC2 dataset to train task-oriented chatbots. Our findings show that self-attentional models can be exploited to create end-to-end task-oriented chatbots that not only achieve higher evaluation scores than recurrence-based models, but also do so more efficiently.</abstract>
<identifier type="citekey">saffar-mehrjardi-etal-2019-self</identifier>
<identifier type="doi">10.26615/978-954-452-056-4_119</identifier>
<location>
<url>https://aclanthology.org/R19-1119</url>
</location>
<part>
<date>2019-09</date>
<extent unit="page">
<start>1031</start>
<end>1040</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Self-Attentional Models Application in Task-Oriented Dialogue Generation Systems
%A Saffar Mehrjardi, Mansour
%A Trabelsi, Amine
%A Zaiane, Osmar R.
%Y Mitkov, Ruslan
%Y Angelova, Galia
%S Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019)
%D 2019
%8 September
%I INCOMA Ltd.
%C Varna, Bulgaria
%F saffar-mehrjardi-etal-2019-self
%X Self-attentional models are a new paradigm for sequence modelling tasks; they differ from common sequence modelling methods, such as recurrence-based and convolution-based sequence learning, in that their architecture relies solely on the attention mechanism. Self-attentional models have been used to build state-of-the-art models for many NLP tasks, such as neural machine translation, but their use for training end-to-end task-oriented dialogue generation systems has not yet been explored. In this study, we apply these models to the DSTC2 dataset to train task-oriented chatbots. Our findings show that self-attentional models can be exploited to create end-to-end task-oriented chatbots that not only achieve higher evaluation scores than recurrence-based models, but also do so more efficiently.
%R 10.26615/978-954-452-056-4_119
%U https://aclanthology.org/R19-1119
%U https://doi.org/10.26615/978-954-452-056-4_119
%P 1031-1040
Markdown (Informal)
[Self-Attentional Models Application in Task-Oriented Dialogue Generation Systems](https://aclanthology.org/R19-1119) (Saffar Mehrjardi et al., RANLP 2019)
ACL
Mansour Saffar Mehrjardi, Amine Trabelsi, and Osmar R. Zaiane. 2019. Self-Attentional Models Application in Task-Oriented Dialogue Generation Systems. In Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019), pages 1031–1040, Varna, Bulgaria. INCOMA Ltd.