@inproceedings{tian-etal-2020-train,
    title = "Train Once, and Decode As You Like",
    author = "Tian, Chao and
      Wang, Yifei and
      Cheng, Hao and
      Lian, Yijiang and
      Zhang, Zhihua",
    editor = "Scott, Donia and
      Bel, Nuria and
      Zong, Chengqing",
    booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
    month = dec,
    year = "2020",
    address = "Barcelona, Spain (Online)",
    publisher = "International Committee on Computational Linguistics",
    url = "https://aclanthology.org/2020.coling-main.25",
    doi = "10.18653/v1/2020.coling-main.25",
    pages = "280--293",
    abstract = "In this paper we propose a unified approach for supporting different generation manners of machine translation, including autoregressive, semi-autoregressive, and refinement-based non-autoregressive models. Our approach works by repeatedly selecting positions and generating tokens at these selected positions. After being trained once, our approach achieves better or competitive translation performance compared with some strong task-specific baseline models in all the settings. This generalization ability benefits mainly from the new training objective that we propose. We validate our approach on the WMT{'}14 English-German and IWSLT{'}14 German-English translation tasks. The experimental results are encouraging.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="tian-etal-2020-train">
    <titleInfo>
        <title>Train Once, and Decode As You Like</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Chao</namePart>
        <namePart type="family">Tian</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Yifei</namePart>
        <namePart type="family">Wang</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Hao</namePart>
        <namePart type="family">Cheng</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Yijiang</namePart>
        <namePart type="family">Lian</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Zhihua</namePart>
        <namePart type="family">Zhang</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2020-12</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
        <titleInfo>
            <title>Proceedings of the 28th International Conference on Computational Linguistics</title>
        </titleInfo>
        <name type="personal">
            <namePart type="given">Donia</namePart>
            <namePart type="family">Scott</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Nuria</namePart>
            <namePart type="family">Bel</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Chengqing</namePart>
            <namePart type="family">Zong</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <originInfo>
            <publisher>International Committee on Computational Linguistics</publisher>
            <place>
                <placeTerm type="text">Barcelona, Spain (Online)</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>In this paper we propose a unified approach for supporting different generation manners of machine translation, including autoregressive, semi-autoregressive, and refinement-based non-autoregressive models. Our approach works by repeatedly selecting positions and generating tokens at these selected positions. After being trained once, our approach achieves better or competitive translation performance compared with some strong task-specific baseline models in all the settings. This generalization ability benefits mainly from the new training objective that we propose. We validate our approach on the WMT’14 English-German and IWSLT’14 German-English translation tasks. The experimental results are encouraging.</abstract>
    <identifier type="citekey">tian-etal-2020-train</identifier>
    <identifier type="doi">10.18653/v1/2020.coling-main.25</identifier>
    <location>
        <url>https://aclanthology.org/2020.coling-main.25</url>
    </location>
    <part>
        <date>2020-12</date>
        <extent unit="page">
            <start>280</start>
            <end>293</end>
        </extent>
    </part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Train Once, and Decode As You Like
%A Tian, Chao
%A Wang, Yifei
%A Cheng, Hao
%A Lian, Yijiang
%A Zhang, Zhihua
%Y Scott, Donia
%Y Bel, Nuria
%Y Zong, Chengqing
%S Proceedings of the 28th International Conference on Computational Linguistics
%D 2020
%8 December
%I International Committee on Computational Linguistics
%C Barcelona, Spain (Online)
%F tian-etal-2020-train
%X In this paper we propose a unified approach for supporting different generation manners of machine translation, including autoregressive, semi-autoregressive, and refinement-based non-autoregressive models. Our approach works by repeatedly selecting positions and generating tokens at these selected positions. After being trained once, our approach achieves better or competitive translation performance compared with some strong task-specific baseline models in all the settings. This generalization ability benefits mainly from the new training objective that we propose. We validate our approach on the WMT’14 English-German and IWSLT’14 German-English translation tasks. The experimental results are encouraging.
%R 10.18653/v1/2020.coling-main.25
%U https://aclanthology.org/2020.coling-main.25
%U https://doi.org/10.18653/v1/2020.coling-main.25
%P 280-293
Markdown (Informal)
[Train Once, and Decode As You Like](https://aclanthology.org/2020.coling-main.25) (Tian et al., COLING 2020)
ACL
Chao Tian, Yifei Wang, Hao Cheng, Yijiang Lian, and Zhihua Zhang. 2020. Train Once, and Decode As You Like. In Proceedings of the 28th International Conference on Computational Linguistics, pages 280–293, Barcelona, Spain (Online). International Committee on Computational Linguistics.
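
The abstract describes decoding as a loop that repeatedly selects target positions and generates tokens at those positions. As a rough illustration only, here is a minimal Python sketch of that kind of iterative refinement decoding (in the style of mask-predict). The `predict` interface, the confidence-based selection rule, and the linear masking schedule are all assumptions made for illustration; they are not the authors' actual model, selection policy, or training objective.

```python
# Hypothetical sketch of "select positions, then generate" decoding.
# Everything here (predict() interface, confidence-based selection,
# linear masking schedule) is illustrative, not the paper's method.
from typing import Callable, List, Tuple

MASK = "<mask>"

def iterative_decode(
    predict: Callable[[List[str]], List[Tuple[str, float]]],
    length: int,
    num_iterations: int,
) -> List[str]:
    """Start from a fully masked target, then repeatedly re-generate
    the least-confident positions for a fixed number of iterations."""
    tokens = [MASK] * length
    confidences = [0.0] * length
    for step in range(num_iterations):
        # Fill each currently masked (i.e. selected) position with the
        # model's predicted token and its confidence score.
        for i, (tok, conf) in enumerate(predict(tokens)):
            if tokens[i] == MASK:
                tokens[i], confidences[i] = tok, conf
        # Select positions to re-generate next round: a linearly
        # shrinking number of lowest-confidence positions.
        num_to_mask = length * (num_iterations - step - 1) // num_iterations
        if num_to_mask == 0:
            break  # final iteration: keep the fully generated sequence
        for i in sorted(range(length), key=lambda i: confidences[i])[:num_to_mask]:
            tokens[i] = MASK
    return tokens

if __name__ == "__main__":
    # Dummy stand-in model: predicts "x" everywhere, with confidence
    # growing left to right, just to exercise the loop.
    dummy = lambda toks: [("x", (i + 1) / len(toks)) for i in range(len(toks))]
    print(iterative_decode(dummy, length=5, num_iterations=3))
```

With `num_iterations=1` this loop degenerates to a single non-autoregressive pass, while larger values trade speed for refinement; varying that knob at inference time, after a single training run, is the kind of flexibility the title's "train once, and decode as you like" refers to.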