BibTeX
@inproceedings{dabre-etal-2020-multilingual,
    title = "Multilingual Neural Machine Translation",
    author = "Dabre, Raj and
      Chu, Chenhui and
      Kunchukuttan, Anoop",
    editor = "Specia, Lucia and
      Beck, Daniel",
    booktitle = "Proceedings of the 28th International Conference on Computational Linguistics: Tutorial Abstracts",
    month = dec,
    year = "2020",
    address = "Barcelona, Spain (Online)",
    publisher = "International Committee on Computational Linguistics",
    url = "https://aclanthology.org/2020.coling-tutorials.3",
    doi = "10.18653/v1/2020.coling-tutorials.3",
    pages = "16--21",
    abstract = "The advent of neural machine translation (NMT) has opened up exciting research in building multilingual translation systems i.e. translation models that can handle more than one language pair. Many advances have been made which have enabled (1) improving translation for low-resource languages via transfer learning from high resource languages; and (2) building compact translation models spanning multiple languages. In this tutorial, we will cover the latest advances in NMT approaches that leverage multilingualism, especially to enhance low-resource translation. In particular, we will focus on the following topics: modeling parameter sharing for multi-way models, massively multilingual models, training protocols, language divergence, transfer learning, zero-shot/zero-resource learning, pivoting, multilingual pre-training and multi-source translation.",
}
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="dabre-etal-2020-multilingual">
    <titleInfo>
      <title>Multilingual Neural Machine Translation</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Raj</namePart>
      <namePart type="family">Dabre</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Chenhui</namePart>
      <namePart type="family">Chu</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Anoop</namePart>
      <namePart type="family">Kunchukuttan</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-12</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 28th International Conference on Computational Linguistics: Tutorial Abstracts</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Lucia</namePart>
        <namePart type="family">Specia</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Daniel</namePart>
        <namePart type="family">Beck</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>International Committee on Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Barcelona, Spain (Online)</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>The advent of neural machine translation (NMT) has opened up exciting research in building multilingual translation systems i.e. translation models that can handle more than one language pair. Many advances have been made which have enabled (1) improving translation for low-resource languages via transfer learning from high resource languages; and (2) building compact translation models spanning multiple languages. In this tutorial, we will cover the latest advances in NMT approaches that leverage multilingualism, especially to enhance low-resource translation. In particular, we will focus on the following topics: modeling parameter sharing for multi-way models, massively multilingual models, training protocols, language divergence, transfer learning, zero-shot/zero-resource learning, pivoting, multilingual pre-training and multi-source translation.</abstract>
    <identifier type="citekey">dabre-etal-2020-multilingual</identifier>
    <identifier type="doi">10.18653/v1/2020.coling-tutorials.3</identifier>
    <location>
      <url>https://aclanthology.org/2020.coling-tutorials.3</url>
    </location>
    <part>
      <date>2020-12</date>
      <extent unit="page">
        <start>16</start>
        <end>21</end>
      </extent>
    </part>
  </mods>
</modsCollection>
Endnote
%0 Conference Proceedings
%T Multilingual Neural Machine Translation
%A Dabre, Raj
%A Chu, Chenhui
%A Kunchukuttan, Anoop
%Y Specia, Lucia
%Y Beck, Daniel
%S Proceedings of the 28th International Conference on Computational Linguistics: Tutorial Abstracts
%D 2020
%8 December
%I International Committee on Computational Linguistics
%C Barcelona, Spain (Online)
%F dabre-etal-2020-multilingual
%X The advent of neural machine translation (NMT) has opened up exciting research in building multilingual translation systems i.e. translation models that can handle more than one language pair. Many advances have been made which have enabled (1) improving translation for low-resource languages via transfer learning from high resource languages; and (2) building compact translation models spanning multiple languages. In this tutorial, we will cover the latest advances in NMT approaches that leverage multilingualism, especially to enhance low-resource translation. In particular, we will focus on the following topics: modeling parameter sharing for multi-way models, massively multilingual models, training protocols, language divergence, transfer learning, zero-shot/zero-resource learning, pivoting, multilingual pre-training and multi-source translation.
%R 10.18653/v1/2020.coling-tutorials.3
%U https://aclanthology.org/2020.coling-tutorials.3
%U https://doi.org/10.18653/v1/2020.coling-tutorials.3
%P 16-21
Markdown (Informal)
[Multilingual Neural Machine Translation](https://aclanthology.org/2020.coling-tutorials.3) (Dabre et al., COLING 2020)
ACL
Raj Dabre, Chenhui Chu, and Anoop Kunchukuttan. 2020. Multilingual Neural Machine Translation. In Proceedings of the 28th International Conference on Computational Linguistics: Tutorial Abstracts, pages 16–21, Barcelona, Spain (Online). International Committee on Computational Linguistics.