@inproceedings{bhaskar-krishnamurthy-2024-yes,
title = "Yes-{MT}{'}s Submission to the Low-Resource {I}ndic Language Translation Shared Task in {WMT} 2024",
author = "Bhaskar, Yash and
Krishnamurthy, Parameswari",
editor = "Haddow, Barry and
Kocmi, Tom and
Koehn, Philipp and
Monz, Christof",
booktitle = "Proceedings of the Ninth Conference on Machine Translation",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.wmt-1.71",
doi = "10.18653/v1/2024.wmt-1.71",
pages = "788--792",
abstract = "This paper presents the systems submitted by the Yes-MT team for the Low-Resource Indic Language Translation Shared Task at WMT 2024, focusing on translating between English and the Assamese, Mizo, Khasi, and Manipuri languages. The experiments explored various approaches, including fine-tuning pre-trained models like mT5 and IndicBart in both Multilingual and Monolingual settings, LoRA finetune IndicTrans2, zero-shot and few-shot prompting with large language models (LLMs) like Llama 3 and Mixtral 8x7b, LoRA Supervised Fine Tuning Llama 3, and training Transformers from scratch. The results were evaluated on the WMT23 Low-Resource Indic Language Translation Shared Task{'}s test data using SacreBLEU and CHRF highlighting the challenges of low-resource translation and show the potential of LLMs for these tasks, particularly with fine-tuning.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="bhaskar-krishnamurthy-2024-yes">
<titleInfo>
<title>Yes-MT’s Submission to the Low-Resource Indic Language Translation Shared Task in WMT 2024</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yash</namePart>
<namePart type="family">Bhaskar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Parameswari</namePart>
<namePart type="family">Krishnamurthy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2024-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Ninth Conference on Machine Translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Barry</namePart>
<namePart type="family">Haddow</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tom</namePart>
<namePart type="family">Kocmi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Philipp</namePart>
<namePart type="family">Koehn</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Christof</namePart>
<namePart type="family">Monz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Miami, Florida, USA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper presents the systems submitted by the Yes-MT team for the Low-Resource Indic Language Translation Shared Task at WMT 2024, focusing on translating between English and the Assamese, Mizo, Khasi, and Manipuri languages. The experiments explored various approaches, including fine-tuning pre-trained models like mT5 and IndicBART in both multilingual and monolingual settings, LoRA fine-tuning of IndicTrans2, zero-shot and few-shot prompting with large language models (LLMs) like Llama 3 and Mixtral 8x7B, LoRA supervised fine-tuning of Llama 3, and training Transformers from scratch. The results were evaluated on the WMT23 Low-Resource Indic Language Translation Shared Task’s test data using SacreBLEU and chrF, highlighting the challenges of low-resource translation and showing the potential of LLMs for these tasks, particularly with fine-tuning.</abstract>
<identifier type="citekey">bhaskar-krishnamurthy-2024-yes</identifier>
<identifier type="doi">10.18653/v1/2024.wmt-1.71</identifier>
<location>
<url>https://aclanthology.org/2024.wmt-1.71</url>
</location>
<part>
<date>2024-11</date>
<extent unit="page">
<start>788</start>
<end>792</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Yes-MT’s Submission to the Low-Resource Indic Language Translation Shared Task in WMT 2024
%A Bhaskar, Yash
%A Krishnamurthy, Parameswari
%Y Haddow, Barry
%Y Kocmi, Tom
%Y Koehn, Philipp
%Y Monz, Christof
%S Proceedings of the Ninth Conference on Machine Translation
%D 2024
%8 November
%I Association for Computational Linguistics
%C Miami, Florida, USA
%F bhaskar-krishnamurthy-2024-yes
%X This paper presents the systems submitted by the Yes-MT team for the Low-Resource Indic Language Translation Shared Task at WMT 2024, focusing on translating between English and the Assamese, Mizo, Khasi, and Manipuri languages. The experiments explored various approaches, including fine-tuning pre-trained models like mT5 and IndicBART in both multilingual and monolingual settings, LoRA fine-tuning of IndicTrans2, zero-shot and few-shot prompting with large language models (LLMs) like Llama 3 and Mixtral 8x7B, LoRA supervised fine-tuning of Llama 3, and training Transformers from scratch. The results were evaluated on the WMT23 Low-Resource Indic Language Translation Shared Task’s test data using SacreBLEU and chrF, highlighting the challenges of low-resource translation and showing the potential of LLMs for these tasks, particularly with fine-tuning.
%R 10.18653/v1/2024.wmt-1.71
%U https://aclanthology.org/2024.wmt-1.71
%U https://doi.org/10.18653/v1/2024.wmt-1.71
%P 788-792
Markdown (Informal)
[Yes-MT’s Submission to the Low-Resource Indic Language Translation Shared Task in WMT 2024](https://aclanthology.org/2024.wmt-1.71) (Bhaskar & Krishnamurthy, WMT 2024)
ACL
Yash Bhaskar and Parameswari Krishnamurthy. 2024. [Yes-MT’s Submission to the Low-Resource Indic Language Translation Shared Task in WMT 2024](https://aclanthology.org/2024.wmt-1.71). In *Proceedings of the Ninth Conference on Machine Translation*, pages 788–792, Miami, Florida, USA. Association for Computational Linguistics.