@inproceedings{xiao-etal-2023-example,
  title     = {Example-Based Machine Translation with a Multi-Sentence Construction {Transformer} Architecture},
  author    = {Xiao, Haozhe and
               Zhou, Yifei and
               Lepage, Yves},
  editor    = {Breitholtz, Ellen and
               Lappin, Shalom and
               Loaiciga, Sharid and
               Ilinykh, Nikolai and
               Dobnik, Simon},
  booktitle = {Proceedings of the 2023 {CLASP} Conference on Learning with Small Data ({LSD})},
  month     = sep,
  year      = {2023},
  address   = {Gothenburg, Sweden},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.clasp-1.9},
  pages     = {72--80},
  abstract  = {Neural Machine Translation (NMT) has now attained state-of-art performance on large-scale data. However, it does not achieve the best translation results on small data sets. Example-Based Machine Translation (EBMT) is an approach to machine translation in which existing examples in a database are retrieved and modified to generate new translations. To combine EBMT with NMT, an architecture based on the Transformer model is proposed. We conduct two experiments respectively using limited amounts of data, one on an English-French bilingual dataset and the other one on a multilingual dataset with six languages (English, French, German, Chinese, Japanese and Russian). On the bilingual task, our method achieves an accuracy of 96.5 and a BLEU score of 98.8. On the multilingual task, it also outperforms OpenNMT in terms of BLEU scores.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="xiao-etal-2023-example">
<titleInfo>
<title>Example-Based Machine Translation with a Multi-Sentence Construction Transformer Architecture</title>
</titleInfo>
<name type="personal">
<namePart type="given">Haozhe</namePart>
<namePart type="family">Xiao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yifei</namePart>
<namePart type="family">Zhou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yves</namePart>
<namePart type="family">Lepage</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2023 CLASP Conference on Learning with Small Data (LSD)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ellen</namePart>
<namePart type="family">Breitholtz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shalom</namePart>
<namePart type="family">Lappin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sharid</namePart>
<namePart type="family">Loaiciga</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nikolai</namePart>
<namePart type="family">Ilinykh</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Simon</namePart>
<namePart type="family">Dobnik</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Gothenburg, Sweden</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Neural Machine Translation (NMT) has now attained state-of-art performance on large-scale data. However, it does not achieve the best translation results on small data sets. Example-Based Machine Translation (EBMT) is an approach to machine translation in which existing examples in a database are retrieved and modified to generate new translations. To combine EBMT with NMT, an architecture based on the Transformer model is proposed. We conduct two experiments respectively using limited amounts of data, one on an English-French bilingual dataset and the other one on a multilingual dataset with six languages (English, French, German, Chinese, Japanese and Russian). On the bilingual task, our method achieves an accuracy of 96.5 and a BLEU score of 98.8. On the multilingual task, it also outperforms OpenNMT in terms of BLEU scores.</abstract>
<identifier type="citekey">xiao-etal-2023-example</identifier>
<location>
<url>https://aclanthology.org/2023.clasp-1.9</url>
</location>
<part>
<date>2023-09</date>
<extent unit="page">
<start>72</start>
<end>80</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Example-Based Machine Translation with a Multi-Sentence Construction Transformer Architecture
%A Xiao, Haozhe
%A Zhou, Yifei
%A Lepage, Yves
%Y Breitholtz, Ellen
%Y Lappin, Shalom
%Y Loaiciga, Sharid
%Y Ilinykh, Nikolai
%Y Dobnik, Simon
%S Proceedings of the 2023 CLASP Conference on Learning with Small Data (LSD)
%D 2023
%8 September
%I Association for Computational Linguistics
%C Gothenburg, Sweden
%F xiao-etal-2023-example
%X Neural Machine Translation (NMT) has now attained state-of-art performance on large-scale data. However, it does not achieve the best translation results on small data sets. Example-Based Machine Translation (EBMT) is an approach to machine translation in which existing examples in a database are retrieved and modified to generate new translations. To combine EBMT with NMT, an architecture based on the Transformer model is proposed. We conduct two experiments respectively using limited amounts of data, one on an English-French bilingual dataset and the other one on a multilingual dataset with six languages (English, French, German, Chinese, Japanese and Russian). On the bilingual task, our method achieves an accuracy of 96.5 and a BLEU score of 98.8. On the multilingual task, it also outperforms OpenNMT in terms of BLEU scores.
%U https://aclanthology.org/2023.clasp-1.9
%P 72-80
Markdown (Informal)
[Example-Based Machine Translation with a Multi-Sentence Construction Transformer Architecture](https://aclanthology.org/2023.clasp-1.9) (Xiao et al., CLASP 2023)
ACL