@inproceedings{morishita-etal-2022-nt5,
title = "{NT}5 at {WMT} 2022 General Translation Task",
author = "Morishita, Makoto and
Kudo, Keito and
Oka, Yui and
Chousa, Katsuki and
Kiyono, Shun and
Takase, Sho and
Suzuki, Jun",
booktitle = "Proceedings of the Seventh Conference on Machine Translation (WMT)",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates (Hybrid)",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.wmt-1.25",
pages = "318--325",
abstract = "This paper describes the NTT-Tohoku-TokyoTech-RIKEN (NT5) team{'}s submission system for the WMT{'}22 general translation task. This year, we focused on the English-to-Japanese and Japanese-to-English translation tracks. Our submission system consists of an ensemble of Transformer models with several extensions. We also applied data augmentation and selection techniques to obtain potentially effective training data for training individual Transformer models in the pre-training and fine-tuning scheme. Additionally, we report our trial of incorporating a reranking module and the reevaluated results of several techniques that have been recently developed and published.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="morishita-etal-2022-nt5">
<titleInfo>
<title>NT5 at WMT 2022 General Translation Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Makoto</namePart>
<namePart type="family">Morishita</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Keito</namePart>
<namePart type="family">Kudo</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yui</namePart>
<namePart type="family">Oka</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Katsuki</namePart>
<namePart type="family">Chousa</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shun</namePart>
<namePart type="family">Kiyono</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sho</namePart>
<namePart type="family">Takase</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jun</namePart>
<namePart type="family">Suzuki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Seventh Conference on Machine Translation (WMT)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Abu Dhabi, United Arab Emirates (Hybrid)</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes the NTT-Tohoku-TokyoTech-RIKEN (NT5) team’s submission system for the WMT’22 general translation task. This year, we focused on the English-to-Japanese and Japanese-to-English translation tracks. Our submission system consists of an ensemble of Transformer models with several extensions. We also applied data augmentation and selection techniques to obtain potentially effective training data for training individual Transformer models in the pre-training and fine-tuning scheme. Additionally, we report our trial of incorporating a reranking module and the reevaluated results of several techniques that have been recently developed and published.</abstract>
<identifier type="citekey">morishita-etal-2022-nt5</identifier>
<location>
<url>https://aclanthology.org/2022.wmt-1.25</url>
</location>
<part>
<date>2022-12</date>
<extent unit="page">
<start>318</start>
<end>325</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T NT5 at WMT 2022 General Translation Task
%A Morishita, Makoto
%A Kudo, Keito
%A Oka, Yui
%A Chousa, Katsuki
%A Kiyono, Shun
%A Takase, Sho
%A Suzuki, Jun
%S Proceedings of the Seventh Conference on Machine Translation (WMT)
%D 2022
%8 December
%I Association for Computational Linguistics
%C Abu Dhabi, United Arab Emirates (Hybrid)
%F morishita-etal-2022-nt5
%X This paper describes the NTT-Tohoku-TokyoTech-RIKEN (NT5) team’s submission system for the WMT’22 general translation task. This year, we focused on the English-to-Japanese and Japanese-to-English translation tracks. Our submission system consists of an ensemble of Transformer models with several extensions. We also applied data augmentation and selection techniques to obtain potentially effective training data for training individual Transformer models in the pre-training and fine-tuning scheme. Additionally, we report our trial of incorporating a reranking module and the reevaluated results of several techniques that have been recently developed and published.
%U https://aclanthology.org/2022.wmt-1.25
%P 318-325
Markdown (Informal)
[NT5 at WMT 2022 General Translation Task](https://aclanthology.org/2022.wmt-1.25) (Morishita et al., WMT 2022)
ACL
- Makoto Morishita, Keito Kudo, Yui Oka, Katsuki Chousa, Shun Kiyono, Sho Takase, and Jun Suzuki. 2022. NT5 at WMT 2022 General Translation Task. In Proceedings of the Seventh Conference on Machine Translation (WMT), pages 318–325, Abu Dhabi, United Arab Emirates (Hybrid). Association for Computational Linguistics.