@inproceedings{deguchi-etal-2021-synchronous,
    title = "Synchronous Syntactic Attention for Transformer Neural Machine Translation",
    author = "Deguchi, Hiroyuki and
      Tamura, Akihiro and
      Ninomiya, Takashi",
    editor = "Kabbara, Jad and
      Lin, Haitao and
      Paullada, Amandalynne and
      Vamvas, Jannis",
    booktitle = "Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop",
    month = aug,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.acl-srw.36",
    doi = "10.18653/v1/2021.acl-srw.36",
    pages = "348--355",
    abstract = "This paper proposes a novel attention mechanism for Transformer Neural Machine Translation, {``}Synchronous Syntactic Attention,{''} inspired by synchronous dependency grammars. The mechanism synchronizes source-side and target-side syntactic self-attentions by minimizing the difference between target-side self-attentions and the source-side self-attentions mapped by the encoder-decoder attention matrix. The experiments show that the proposed method improves the translation performance on WMT14 En-De, WMT16 En-Ro, and ASPEC Ja-En (up to +0.38 points in BLEU).",
}

<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="deguchi-etal-2021-synchronous">
    <titleInfo>
      <title>Synchronous Syntactic Attention for Transformer Neural Machine Translation</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Hiroyuki</namePart>
      <namePart type="family">Deguchi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Akihiro</namePart>
      <namePart type="family">Tamura</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Takashi</namePart>
      <namePart type="family">Ninomiya</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-08</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Jad</namePart>
        <namePart type="family">Kabbara</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Haitao</namePart>
        <namePart type="family">Lin</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Amandalynne</namePart>
        <namePart type="family">Paullada</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Jannis</namePart>
        <namePart type="family">Vamvas</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>This paper proposes a novel attention mechanism for Transformer Neural Machine Translation, “Synchronous Syntactic Attention,” inspired by synchronous dependency grammars. The mechanism synchronizes source-side and target-side syntactic self-attentions by minimizing the difference between target-side self-attentions and the source-side self-attentions mapped by the encoder-decoder attention matrix. The experiments show that the proposed method improves the translation performance on WMT14 En-De, WMT16 En-Ro, and ASPEC Ja-En (up to +0.38 points in BLEU).</abstract>
    <identifier type="citekey">deguchi-etal-2021-synchronous</identifier>
    <identifier type="doi">10.18653/v1/2021.acl-srw.36</identifier>
    <location>
      <url>https://aclanthology.org/2021.acl-srw.36</url>
    </location>
    <part>
      <date>2021-08</date>
      <extent unit="page">
        <start>348</start>
        <end>355</end>
      </extent>
    </part>
  </mods>
</modsCollection>

%0 Conference Proceedings
%T Synchronous Syntactic Attention for Transformer Neural Machine Translation
%A Deguchi, Hiroyuki
%A Tamura, Akihiro
%A Ninomiya, Takashi
%Y Kabbara, Jad
%Y Lin, Haitao
%Y Paullada, Amandalynne
%Y Vamvas, Jannis
%S Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop
%D 2021
%8 August
%I Association for Computational Linguistics
%C Online
%F deguchi-etal-2021-synchronous
%X This paper proposes a novel attention mechanism for Transformer Neural Machine Translation, “Synchronous Syntactic Attention,” inspired by synchronous dependency grammars. The mechanism synchronizes source-side and target-side syntactic self-attentions by minimizing the difference between target-side self-attentions and the source-side self-attentions mapped by the encoder-decoder attention matrix. The experiments show that the proposed method improves the translation performance on WMT14 En-De, WMT16 En-Ro, and ASPEC Ja-En (up to +0.38 points in BLEU).
%R 10.18653/v1/2021.acl-srw.36
%U https://aclanthology.org/2021.acl-srw.36
%U https://doi.org/10.18653/v1/2021.acl-srw.36
%P 348-355

Markdown (Informal)
[Synchronous Syntactic Attention for Transformer Neural Machine Translation](https://aclanthology.org/2021.acl-srw.36) (Deguchi et al., ACL-IJCNLP 2021)

ACL
Hiroyuki Deguchi, Akihiro Tamura, and Takashi Ninomiya. 2021. Synchronous Syntactic Attention for Transformer Neural Machine Translation. In Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop, pages 348–355, Online. Association for Computational Linguistics.
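
As a rough illustration of the mechanism named in the abstract, the sketch below computes a synchronization penalty between target-side self-attention and source-side self-attention mapped through the encoder-decoder attention. This is a minimal PyTorch sketch, not the authors' implementation: the mapping `cross @ a_src @ crossᵀ`, the mean-squared-error distance, and the name `sync_attention_loss` are all assumptions for illustration; the paper may use a different distance or normalization.

```python
import torch

def sync_attention_loss(a_src, a_tgt, cross):
    """Synchronization penalty in the spirit of Deguchi et al. (2021).

    a_src : (batch, n_src, n_src) source-side self-attention weights
    a_tgt : (batch, n_tgt, n_tgt) target-side self-attention weights
    cross : (batch, n_tgt, n_src) encoder-decoder attention weights

    Assumption (not necessarily the paper's exact formulation): source
    self-attention is mapped onto target positions as C @ A_src @ C^T,
    and the difference from A_tgt is measured with mean squared error.
    """
    # Map source self-attention into target-position space: (batch, n_tgt, n_tgt).
    mapped = cross @ a_src @ cross.transpose(-2, -1)
    # Penalize divergence between target self-attention and the mapped attention.
    return ((a_tgt - mapped) ** 2).mean()

# Toy usage with random attention-like matrices (rows sum to 1 via softmax).
batch, n_src, n_tgt = 2, 5, 4
a_src = torch.softmax(torch.randn(batch, n_src, n_src), dim=-1)
a_tgt = torch.softmax(torch.randn(batch, n_tgt, n_tgt), dim=-1)
cross = torch.softmax(torch.randn(batch, n_tgt, n_src), dim=-1)
print(sync_attention_loss(a_src, a_tgt, cross).item())
```

In training, such a penalty would be added to the usual cross-entropy objective with a weighting coefficient, so that the decoder's syntactic self-attention is pulled toward the encoder's attention structure as projected by the cross-attention.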