BibTeX
@inproceedings{zhou-etal-2019-improving,
    title = "Improving Robustness of Neural Machine Translation with Multi-task Learning",
    author = "Zhou, Shuyan  and
      Zeng, Xiangkai  and
      Zhou, Yingqi  and
      Anastasopoulos, Antonios  and
      Neubig, Graham",
    booktitle = "Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1)",
    month = aug,
    year = "2019",
    address = "Florence, Italy",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W19-5368",
    doi = "10.18653/v1/W19-5368",
    pages = "565--571",
    abstract = "While neural machine translation (NMT) achieves remarkable performance on clean, in-domain text, performance is known to degrade drastically when facing text which is full of typos, grammatical errors and other varieties of noise. In this work, we propose a multi-task learning algorithm for transformer-based MT systems that is more resilient to this noise. We describe our submission to the WMT 2019 Robustness shared task based on this method. Our model achieves a BLEU score of 32.8 on the shared task French to English dataset, which is 7.1 BLEU points higher than the baseline vanilla transformer trained with clean text.",
}
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="zhou-etal-2019-improving">
    <titleInfo>
      <title>Improving Robustness of Neural Machine Translation with Multi-task Learning</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Shuyan</namePart>
      <namePart type="family">Zhou</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Xiangkai</namePart>
      <namePart type="family">Zeng</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yingqi</namePart>
      <namePart type="family">Zhou</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Antonios</namePart>
      <namePart type="family">Anastasopoulos</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Graham</namePart>
      <namePart type="family">Neubig</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2019-08</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1)</title>
      </titleInfo>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Florence, Italy</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>While neural machine translation (NMT) achieves remarkable performance on clean, in-domain text, performance is known to degrade drastically when facing text which is full of typos, grammatical errors and other varieties of noise. In this work, we propose a multi-task learning algorithm for transformer-based MT systems that is more resilient to this noise. We describe our submission to the WMT 2019 Robustness shared task based on this method. Our model achieves a BLEU score of 32.8 on the shared task French to English dataset, which is 7.1 BLEU points higher than the baseline vanilla transformer trained with clean text.</abstract>
    <identifier type="citekey">zhou-etal-2019-improving</identifier>
    <identifier type="doi">10.18653/v1/W19-5368</identifier>
    <location>
      <url>https://aclanthology.org/W19-5368</url>
    </location>
    <part>
      <date>2019-08</date>
      <extent unit="page">
        <start>565</start>
        <end>571</end>
      </extent>
    </part>
  </mods>
</modsCollection>
Endnote
%0 Conference Proceedings
%T Improving Robustness of Neural Machine Translation with Multi-task Learning
%A Zhou, Shuyan
%A Zeng, Xiangkai
%A Zhou, Yingqi
%A Anastasopoulos, Antonios
%A Neubig, Graham
%S Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1)
%D 2019
%8 August
%I Association for Computational Linguistics
%C Florence, Italy
%F zhou-etal-2019-improving
%X While neural machine translation (NMT) achieves remarkable performance on clean, in-domain text, performance is known to degrade drastically when facing text which is full of typos, grammatical errors and other varieties of noise. In this work, we propose a multi-task learning algorithm for transformer-based MT systems that is more resilient to this noise. We describe our submission to the WMT 2019 Robustness shared task based on this method. Our model achieves a BLEU score of 32.8 on the shared task French to English dataset, which is 7.1 BLEU points higher than the baseline vanilla transformer trained with clean text.
%R 10.18653/v1/W19-5368
%U https://aclanthology.org/W19-5368
%U https://doi.org/10.18653/v1/W19-5368
%P 565-571
Markdown (Informal)
[Improving Robustness of Neural Machine Translation with Multi-task Learning](https://aclanthology.org/W19-5368) (Zhou et al., WMT 2019)
ACL
Shuyan Zhou, Xiangkai Zeng, Yingqi Zhou, Antonios Anastasopoulos, and Graham Neubig. 2019. Improving Robustness of Neural Machine Translation with Multi-task Learning. In Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1), pages 565–571, Florence, Italy. Association for Computational Linguistics.