BibTeX
@inproceedings{imamura-sumita-2018-nict,
    title = "{NICT} Self-Training Approach to Neural Machine Translation at {NMT}-2018",
    author = "Imamura, Kenji and
      Sumita, Eiichiro",
    editor = "Birch, Alexandra and
      Finch, Andrew and
      Luong, Thang and
      Neubig, Graham and
      Oda, Yusuke",
    booktitle = "Proceedings of the 2nd Workshop on Neural Machine Translation and Generation",
    month = jul,
    year = "2018",
    address = "Melbourne, Australia",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W18-2713",
    doi = "10.18653/v1/W18-2713",
    pages = "110--115",
    abstract = "This paper describes the NICT neural machine translation system submitted at the NMT-2018 shared task. A characteristic of our approach is the introduction of self-training. Since our self-training does not change the model structure, it does not influence the efficiency of translation, such as the translation speed. The experimental results showed that the translation quality improved not only in the sequence-to-sequence (seq-to-seq) models but also in the transformer models.",
}

MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="imamura-sumita-2018-nict">
    <titleInfo>
      <title>NICT Self-Training Approach to Neural Machine Translation at NMT-2018</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Kenji</namePart>
      <namePart type="family">Imamura</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Eiichiro</namePart>
      <namePart type="family">Sumita</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2018-07</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 2nd Workshop on Neural Machine Translation and Generation</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Alexandra</namePart>
        <namePart type="family">Birch</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Andrew</namePart>
        <namePart type="family">Finch</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Thang</namePart>
        <namePart type="family">Luong</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Graham</namePart>
        <namePart type="family">Neubig</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Yusuke</namePart>
        <namePart type="family">Oda</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Melbourne, Australia</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>This paper describes the NICT neural machine translation system submitted at the NMT-2018 shared task. A characteristic of our approach is the introduction of self-training. Since our self-training does not change the model structure, it does not influence the efficiency of translation, such as the translation speed. The experimental results showed that the translation quality improved not only in the sequence-to-sequence (seq-to-seq) models but also in the transformer models.</abstract>
    <identifier type="citekey">imamura-sumita-2018-nict</identifier>
    <identifier type="doi">10.18653/v1/W18-2713</identifier>
    <location>
      <url>https://aclanthology.org/W18-2713</url>
    </location>
    <part>
      <date>2018-07</date>
      <extent unit="page">
        <start>110</start>
        <end>115</end>
      </extent>
    </part>
  </mods>
</modsCollection>

Endnote
%0 Conference Proceedings
%T NICT Self-Training Approach to Neural Machine Translation at NMT-2018
%A Imamura, Kenji
%A Sumita, Eiichiro
%Y Birch, Alexandra
%Y Finch, Andrew
%Y Luong, Thang
%Y Neubig, Graham
%Y Oda, Yusuke
%S Proceedings of the 2nd Workshop on Neural Machine Translation and Generation
%D 2018
%8 July
%I Association for Computational Linguistics
%C Melbourne, Australia
%F imamura-sumita-2018-nict
%X This paper describes the NICT neural machine translation system submitted at the NMT-2018 shared task. A characteristic of our approach is the introduction of self-training. Since our self-training does not change the model structure, it does not influence the efficiency of translation, such as the translation speed. The experimental results showed that the translation quality improved not only in the sequence-to-sequence (seq-to-seq) models but also in the transformer models.
%R 10.18653/v1/W18-2713
%U https://aclanthology.org/W18-2713
%U https://doi.org/10.18653/v1/W18-2713
%P 110-115

Markdown (Informal)
[NICT Self-Training Approach to Neural Machine Translation at NMT-2018](https://aclanthology.org/W18-2713) (Imamura & Sumita, NGT 2018)

ACL
Kenji Imamura and Eiichiro Sumita. 2018. NICT Self-Training Approach to Neural Machine Translation at NMT-2018. In Proceedings of the 2nd Workshop on Neural Machine Translation and Generation, pages 110–115, Melbourne, Australia. Association for Computational Linguistics.