@inproceedings{mhaskar-bhattacharyya-2021-pivot,
title = "Pivot Based Transfer Learning for Neural Machine Translation: {CFILT} {IITB} @ {WMT} 2021 Triangular {MT}",
author = "Mhaskar, Shivam and
Bhattacharyya, Pushpak",
booktitle = "Proceedings of the Sixth Conference on Machine Translation",
month = nov,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.wmt-1.39",
pages = "336--340",
abstract = "In this paper, we discuss the various techniques that we used to implement the Russian-Chinese machine translation system for the Triangular MT task at WMT 2021. Neural Machine translation systems based on transformer architecture have an encoder-decoder architecture, which are trained end-to-end and require a large amount of parallel corpus to produce good quality translations. This is the reason why neural machine translation systems are referred to as \textit{data hungry}. Such a large amount of parallel corpus is majorly available for language pairs which include English and not for non-English language pairs. This is a major problem in building neural machine translation systems for non-English language pairs. We try to utilize the resources of the English language to improve the translation of non-English language pairs. We use the pivot language, that is English, to leverage transfer learning to improve the quality of Russian-Chinese translation. Compared to the baseline transformer-based neural machine translation system, we observe that the pivot language-based transfer learning technique gives a higher BLEU score.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="mhaskar-bhattacharyya-2021-pivot">
<titleInfo>
<title>Pivot Based Transfer Learning for Neural Machine Translation: CFILT IITB @ WMT 2021 Triangular MT</title>
</titleInfo>
<name type="personal">
<namePart type="given">Shivam</namePart>
<namePart type="family">Mhaskar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pushpak</namePart>
<namePart type="family">Bhattacharyya</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Sixth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>In this paper, we discuss the various techniques that we used to implement the Russian-Chinese machine translation system for the Triangular MT task at WMT 2021. Neural machine translation systems based on the transformer architecture have an encoder-decoder structure, are trained end-to-end, and require a large amount of parallel data to produce good-quality translations. This is why neural machine translation systems are referred to as data hungry. Such large parallel corpora are mostly available for language pairs that include English, but not for non-English language pairs, which is a major obstacle to building neural machine translation systems for non-English language pairs. We try to utilize the resources of the English language to improve the translation of non-English language pairs. We use English as the pivot language and leverage transfer learning to improve the quality of Russian-Chinese translation. Compared to the baseline transformer-based neural machine translation system, we observe that the pivot language-based transfer learning technique gives a higher BLEU score.</abstract>
<identifier type="citekey">mhaskar-bhattacharyya-2021-pivot</identifier>
<location>
<url>https://aclanthology.org/2021.wmt-1.39</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>336</start>
<end>340</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Pivot Based Transfer Learning for Neural Machine Translation: CFILT IITB @ WMT 2021 Triangular MT
%A Mhaskar, Shivam
%A Bhattacharyya, Pushpak
%S Proceedings of the Sixth Conference on Machine Translation
%D 2021
%8 November
%I Association for Computational Linguistics
%C Online
%F mhaskar-bhattacharyya-2021-pivot
%X In this paper, we discuss the various techniques that we used to implement the Russian-Chinese machine translation system for the Triangular MT task at WMT 2021. Neural machine translation systems based on the transformer architecture have an encoder-decoder structure, are trained end-to-end, and require a large amount of parallel data to produce good-quality translations. This is why neural machine translation systems are referred to as data hungry. Such large parallel corpora are mostly available for language pairs that include English, but not for non-English language pairs, which is a major obstacle to building neural machine translation systems for non-English language pairs. We try to utilize the resources of the English language to improve the translation of non-English language pairs. We use English as the pivot language and leverage transfer learning to improve the quality of Russian-Chinese translation. Compared to the baseline transformer-based neural machine translation system, we observe that the pivot language-based transfer learning technique gives a higher BLEU score.
%U https://aclanthology.org/2021.wmt-1.39
%P 336-340
Markdown (Informal)
[Pivot Based Transfer Learning for Neural Machine Translation: CFILT IITB @ WMT 2021 Triangular MT](https://aclanthology.org/2021.wmt-1.39) (Mhaskar & Bhattacharyya, WMT 2021)
ACL
Shivam Mhaskar and Pushpak Bhattacharyya. 2021. Pivot Based Transfer Learning for Neural Machine Translation: CFILT IITB @ WMT 2021 Triangular MT. In Proceedings of the Sixth Conference on Machine Translation, pages 336–340, Online. Association for Computational Linguistics.