@inproceedings{acarcicek-etal-2020-filtering,
    title = "Filtering Noisy Parallel Corpus using Transformers with Proxy Task Learning",
    author = "A{\c{c}}ar{\c{c}}i{\c{c}}ek, Haluk and
      {\c{C}}olako{\u{g}}lu, Talha and
      Aktan Hatipo{\u{g}}lu, P{\i}nar Ece and
      Huang, Chong Hsuan and
      Peng, Wei",
    booktitle = "Proceedings of the Fifth Conference on Machine Translation",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.wmt-1.105",
    pages = "940--946",
    abstract = "This paper illustrates Huawei{'}s submission to the WMT20 low-resource parallel corpus filtering shared task. Our approach focuses on developing a proxy task learner on top of a transformer-based multilingual pre-trained language model to boost the filtering capability for noisy parallel corpora. Such a supervised task also helps us to iterate much more quickly than using an existing neural machine translation system to perform the same task. After performing empirical analyses of the fine-tuning task, we benchmark our approach by comparing the results with past years{'} state-of-the-art records. This paper wraps up with a discussion of limitations and future work. The scripts for this study will be made publicly available.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="acarcicek-etal-2020-filtering">
    <titleInfo>
      <title>Filtering Noisy Parallel Corpus using Transformers with Proxy Task Learning</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Haluk</namePart>
      <namePart type="family">Açarçiçek</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Talha</namePart>
      <namePart type="family">Çolakoğlu</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Pınar</namePart>
      <namePart type="given">Ece</namePart>
      <namePart type="family">Aktan Hatipoğlu</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Chong</namePart>
      <namePart type="given">Hsuan</namePart>
      <namePart type="family">Huang</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Wei</namePart>
      <namePart type="family">Peng</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fifth Conference on Machine Translation</title>
      </titleInfo>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>This paper illustrates Huawei’s submission to the WMT20 low-resource parallel corpus filtering shared task. Our approach focuses on developing a proxy task learner on top of a transformer-based multilingual pre-trained language model to boost the filtering capability for noisy parallel corpora. Such a supervised task also helps us to iterate much more quickly than using an existing neural machine translation system to perform the same task. After performing empirical analyses of the fine-tuning task, we benchmark our approach by comparing the results with past years’ state-of-the-art records. This paper wraps up with a discussion of limitations and future work. The scripts for this study will be made publicly available.</abstract>
    <identifier type="citekey">acarcicek-etal-2020-filtering</identifier>
    <location>
      <url>https://aclanthology.org/2020.wmt-1.105</url>
    </location>
    <part>
      <date>2020-11</date>
      <extent unit="page">
        <start>940</start>
        <end>946</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Filtering Noisy Parallel Corpus using Transformers with Proxy Task Learning
%A Açarçiçek, Haluk
%A Çolakoğlu, Talha
%A Aktan Hatipoğlu, Pınar Ece
%A Huang, Chong Hsuan
%A Peng, Wei
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 November
%I Association for Computational Linguistics
%C Online
%F acarcicek-etal-2020-filtering
%X This paper illustrates Huawei’s submission to the WMT20 low-resource parallel corpus filtering shared task. Our approach focuses on developing a proxy task learner on top of a transformer-based multilingual pre-trained language model to boost the filtering capability for noisy parallel corpora. Such a supervised task also helps us to iterate much more quickly than using an existing neural machine translation system to perform the same task. After performing empirical analyses of the fine-tuning task, we benchmark our approach by comparing the results with past years’ state-of-the-art records. This paper wraps up with a discussion of limitations and future work. The scripts for this study will be made publicly available.
%U https://aclanthology.org/2020.wmt-1.105
%P 940-946
Markdown (Informal)
[Filtering Noisy Parallel Corpus using Transformers with Proxy Task Learning](https://aclanthology.org/2020.wmt-1.105) (Açarçiçek et al., WMT 2020)
ACL
Haluk Açarçiçek, Talha Çolakoğlu, Pınar Ece Aktan Hatipoğlu, Chong Hsuan Huang, and Wei Peng. 2020. Filtering Noisy Parallel Corpus using Transformers with Proxy Task Learning. In Proceedings of the Fifth Conference on Machine Translation, pages 940–946, Online. Association for Computational Linguistics.