@inproceedings{sekizawa-etal-2017-improving,
title = "Improving {J}apanese-to-{E}nglish Neural Machine Translation by Paraphrasing the Target Language",
author = "Sekizawa, Yuuki and
Kajiwara, Tomoyuki and
Komachi, Mamoru",
editor = "Nakazawa, Toshiaki and
Goto, Isao",
booktitle = "Proceedings of the 4th Workshop on {A}sian Translation ({WAT}2017)",
month = nov,
year = "2017",
address = "Taipei, Taiwan",
publisher = "Asian Federation of Natural Language Processing",
url = "https://aclanthology.org/W17-5703",
pages = "64--69",
    abstract = "Neural machine translation (NMT) produces sentences that are more fluent than those produced by statistical machine translation (SMT). However, NMT has a very high computational cost because of the high dimensionality of the output layer. Generally, NMT restricts the size of vocabulary, which results in infrequent words being treated as out-of-vocabulary (OOV) and degrades the performance of the translation. In evaluation, we achieved a statistically significant BLEU score improvement of 0.55--0.77 over the baselines including the state-of-the-art method.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="sekizawa-etal-2017-improving">
<titleInfo>
<title>Improving Japanese-to-English Neural Machine Translation by Paraphrasing the Target Language</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yuuki</namePart>
<namePart type="family">Sekizawa</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tomoyuki</namePart>
<namePart type="family">Kajiwara</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mamoru</namePart>
<namePart type="family">Komachi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2017-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 4th Workshop on Asian Translation (WAT2017)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Toshiaki</namePart>
<namePart type="family">Nakazawa</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Isao</namePart>
<namePart type="family">Goto</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Asian Federation of Natural Language Processing</publisher>
<place>
<placeTerm type="text">Taipei, Taiwan</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Neural machine translation (NMT) produces sentences that are more fluent than those produced by statistical machine translation (SMT). However, NMT has a very high computational cost because of the high dimensionality of the output layer. Generally, NMT restricts the size of vocabulary, which results in infrequent words being treated as out-of-vocabulary (OOV) and degrades the performance of the translation. In evaluation, we achieved a statistically significant BLEU score improvement of 0.55-0.77 over the baselines including the state-of-the-art method.</abstract>
<identifier type="citekey">sekizawa-etal-2017-improving</identifier>
<location>
<url>https://aclanthology.org/W17-5703</url>
</location>
<part>
<date>2017-11</date>
<extent unit="page">
<start>64</start>
<end>69</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Improving Japanese-to-English Neural Machine Translation by Paraphrasing the Target Language
%A Sekizawa, Yuuki
%A Kajiwara, Tomoyuki
%A Komachi, Mamoru
%Y Nakazawa, Toshiaki
%Y Goto, Isao
%S Proceedings of the 4th Workshop on Asian Translation (WAT2017)
%D 2017
%8 November
%I Asian Federation of Natural Language Processing
%C Taipei, Taiwan
%F sekizawa-etal-2017-improving
%X Neural machine translation (NMT) produces sentences that are more fluent than those produced by statistical machine translation (SMT). However, NMT has a very high computational cost because of the high dimensionality of the output layer. Generally, NMT restricts the size of vocabulary, which results in infrequent words being treated as out-of-vocabulary (OOV) and degrades the performance of the translation. In evaluation, we achieved a statistically significant BLEU score improvement of 0.55-0.77 over the baselines including the state-of-the-art method.
%U https://aclanthology.org/W17-5703
%P 64-69
Markdown (Informal)
[Improving Japanese-to-English Neural Machine Translation by Paraphrasing the Target Language](https://aclanthology.org/W17-5703) (Sekizawa et al., WAT 2017)
ACL