@inproceedings{chen-etal-2022-modeling-compositionality,
title = "Modeling Compositionality with Dependency Graph for Dialogue Generation",
author = "Chen, Xiaofeng and
Chen, Yirong and
Xing, Xiaofen and
Xu, Xiangmin and
Han, Wenjing and
Tie, Qianfeng",
editor = "Chen, Wenhu and
Chen, Xinyun and
Chen, Zhiyu and
Yao, Ziyu and
Yasunaga, Michihiro and
Yu, Tao and
Zhang, Rui",
booktitle = "Proceedings of the Workshop on Structured and Unstructured Knowledge Integration (SUKI)",
month = jul,
year = "2022",
address = "Seattle, USA",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.suki-1.2",
doi = "10.18653/v1/2022.suki-1.2",
pages = "9--16",
abstract = "Because of the compositionality of natural language, syntactic structure which contains the information about the relationship between words is a key factor for semantic understanding. However, the widely adopted Transformer is hard to learn the syntactic structure effectively in dialogue generation tasks. To explicitly model the compositionaity of language in Transformer Block, we restrict the information flow between words by constructing directed dependency graph and propose Dependency Relation Attention (DRA). Experimental results demonstrate that DRA can further improve the performance of state-of-the-art models for dialogue generation.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="chen-etal-2022-modeling-compositionality">
<titleInfo>
<title>Modeling Compositionality with Dependency Graph for Dialogue Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Xiaofeng</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yirong</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xiaofen</namePart>
<namePart type="family">Xing</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xiangmin</namePart>
<namePart type="family">Xu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Wenjing</namePart>
<namePart type="family">Han</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Qianfeng</namePart>
<namePart type="family">Tie</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Workshop on Structured and Unstructured Knowledge Integration (SUKI)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Wenhu</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xinyun</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zhiyu</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ziyu</namePart>
<namePart type="family">Yao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Michihiro</namePart>
<namePart type="family">Yasunaga</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tao</namePart>
<namePart type="family">Yu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Rui</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Seattle, USA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Because of the compositionality of natural language, syntactic structure, which contains information about the relationships between words, is a key factor for semantic understanding. However, it is hard for the widely adopted Transformer to learn syntactic structure effectively in dialogue generation tasks. To explicitly model the compositionality of language in the Transformer block, we restrict the information flow between words by constructing a directed dependency graph and propose Dependency Relation Attention (DRA). Experimental results demonstrate that DRA can further improve the performance of state-of-the-art models for dialogue generation.</abstract>
<identifier type="citekey">chen-etal-2022-modeling-compositionality</identifier>
<identifier type="doi">10.18653/v1/2022.suki-1.2</identifier>
<location>
<url>https://aclanthology.org/2022.suki-1.2</url>
</location>
<part>
<date>2022-07</date>
<extent unit="page">
<start>9</start>
<end>16</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Modeling Compositionality with Dependency Graph for Dialogue Generation
%A Chen, Xiaofeng
%A Chen, Yirong
%A Xing, Xiaofen
%A Xu, Xiangmin
%A Han, Wenjing
%A Tie, Qianfeng
%Y Chen, Wenhu
%Y Chen, Xinyun
%Y Chen, Zhiyu
%Y Yao, Ziyu
%Y Yasunaga, Michihiro
%Y Yu, Tao
%Y Zhang, Rui
%S Proceedings of the Workshop on Structured and Unstructured Knowledge Integration (SUKI)
%D 2022
%8 July
%I Association for Computational Linguistics
%C Seattle, USA
%F chen-etal-2022-modeling-compositionality
%X Because of the compositionality of natural language, syntactic structure, which contains information about the relationships between words, is a key factor for semantic understanding. However, it is hard for the widely adopted Transformer to learn syntactic structure effectively in dialogue generation tasks. To explicitly model the compositionality of language in the Transformer block, we restrict the information flow between words by constructing a directed dependency graph and propose Dependency Relation Attention (DRA). Experimental results demonstrate that DRA can further improve the performance of state-of-the-art models for dialogue generation.
%R 10.18653/v1/2022.suki-1.2
%U https://aclanthology.org/2022.suki-1.2
%U https://doi.org/10.18653/v1/2022.suki-1.2
%P 9-16
Markdown (Informal)
[Modeling Compositionality with Dependency Graph for Dialogue Generation](https://aclanthology.org/2022.suki-1.2) (Chen et al., SUKI 2022)
ACL
Xiaofeng Chen, Yirong Chen, Xiaofen Xing, Xiangmin Xu, Wenjing Han, and Qianfeng Tie. 2022. [Modeling Compositionality with Dependency Graph for Dialogue Generation](https://aclanthology.org/2022.suki-1.2). In *Proceedings of the Workshop on Structured and Unstructured Knowledge Integration (SUKI)*, pages 9–16, Seattle, USA. Association for Computational Linguistics.
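The abstract states that DRA restricts the information flow between words by constructing a directed dependency graph inside the Transformer block. As a rough, non-authoritative illustration of that idea only (not the authors' implementation; the paper's exact formulation may differ), the sketch below masks standard scaled dot-product attention with a dependency adjacency matrix. The tensor shapes, the self-loop handling, and the function name are assumptions.

```python
# Illustrative sketch only (not the authors' code): one way to restrict the
# information flow between words with a directed dependency graph, in the
# spirit of the Dependency Relation Attention (DRA) described in the abstract.
import torch
import torch.nn.functional as F


def dependency_masked_attention(q, k, v, dep_adj):
    """Scaled dot-product attention where token i may attend to token j only
    if the directed dependency graph has an edge i -> j (or i == j).

    q, k, v:  (batch, seq_len, d_model) query/key/value tensors.
    dep_adj:  (batch, seq_len, seq_len) boolean adjacency matrix of the
              directed dependency graph, True where an edge exists.
    """
    d_model = q.size(-1)
    scores = torch.matmul(q, k.transpose(-2, -1)) / d_model ** 0.5  # (batch, L, L)
    # Keep self-attention so that no row of the score matrix is fully masked.
    self_loops = torch.eye(q.size(1), dtype=torch.bool, device=q.device)
    mask = dep_adj | self_loops
    scores = scores.masked_fill(~mask, float("-inf"))
    weights = F.softmax(scores, dim=-1)
    return torch.matmul(weights, v)
```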