@inproceedings{zeng-etal-2023-bit,
title = "{BIT}-{ACT}: An {A}ncient {C}hinese Translation System Using Data Augmentation",
author = "Zeng, Li and
Tian, Yanzhi and
Shan, Yingyu and
Guo, Yuhang",
booktitle = "Proceedings of ALT2023: Ancient Language Translation Workshop",
month = sep,
year = "2023",
address = "Macau SAR, China",
publisher = "Asia-Pacific Association for Machine Translation",
url = "https://aclanthology.org/2023.alt-1.6",
pages = "43--47",
abstract = "This paper describes a translation model for ancient Chinese to modern Chinese and English for the Evahan 2023 competition, a subtask of the Ancient Language Translation 2023 challenge. During the training of our model, we applied various data augmentation techniques and used SiKu-RoBERTa as part of our model architecture. The results indicate that back translation improves the model{'}s performance, but double back translation introduces noise and harms the model{'}s performance. Fine-tuning on the original dataset can be helpful in solving the issue.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="zeng-etal-2023-bit">
    <titleInfo>
      <title>BIT-ACT: An Ancient Chinese Translation System Using Data Augmentation</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Li</namePart>
      <namePart type="family">Zeng</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yanzhi</namePart>
      <namePart type="family">Tian</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yingyu</namePart>
      <namePart type="family">Shan</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yuhang</namePart>
      <namePart type="family">Guo</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2023-09</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of ALT2023: Ancient Language Translation Workshop</title>
      </titleInfo>
      <originInfo>
        <publisher>Asia-Pacific Association for Machine Translation</publisher>
        <place>
          <placeTerm type="text">Macau SAR, China</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>This paper describes a translation model for ancient Chinese to modern Chinese and English for the Evahan 2023 competition, a subtask of the Ancient Language Translation 2023 challenge. During the training of our model, we applied various data augmentation techniques and used SiKu-RoBERTa as part of our model architecture. The results indicate that back translation improves the model’s performance, but double back translation introduces noise and harms the model’s performance. Fine-tuning on the original dataset can be helpful in solving the issue.</abstract>
    <identifier type="citekey">zeng-etal-2023-bit</identifier>
    <location>
      <url>https://aclanthology.org/2023.alt-1.6</url>
    </location>
    <part>
      <date>2023-09</date>
      <extent unit="page">
        <start>43</start>
        <end>47</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T BIT-ACT: An Ancient Chinese Translation System Using Data Augmentation
%A Zeng, Li
%A Tian, Yanzhi
%A Shan, Yingyu
%A Guo, Yuhang
%S Proceedings of ALT2023: Ancient Language Translation Workshop
%D 2023
%8 September
%I Asia-Pacific Association for Machine Translation
%C Macau SAR, China
%F zeng-etal-2023-bit
%X This paper describes a translation model for ancient Chinese to modern Chinese and English for the Evahan 2023 competition, a subtask of the Ancient Language Translation 2023 challenge. During the training of our model, we applied various data augmentation techniques and used SiKu-RoBERTa as part of our model architecture. The results indicate that back translation improves the model’s performance, but double back translation introduces noise and harms the model’s performance. Fine-tuning on the original dataset can be helpful in solving the issue.
%U https://aclanthology.org/2023.alt-1.6
%P 43-47
Markdown (Informal)
[BIT-ACT: An Ancient Chinese Translation System Using Data Augmentation](https://aclanthology.org/2023.alt-1.6) (Zeng et al., alt 2023)
ACL
Li Zeng, Yanzhi Tian, Yingyu Shan, and Yuhang Guo. 2023. BIT-ACT: An Ancient Chinese Translation System Using Data Augmentation. In Proceedings of ALT2023: Ancient Language Translation Workshop, pages 43–47, Macau SAR, China. Asia-Pacific Association for Machine Translation.