@inproceedings{raj-2025-challenge,
title = "Challenge Track: {L}o{RA}s in All Directions: Directional Adapters and Noisy-Channel Reranking for {I}ndic {MT}",
author = "Raj, Sajay",
editor = "Shukla, Ankita and
Kumar, Sandeep and
Bedi, Amrit Singh and
Chakraborty, Tanmoy",
booktitle = "Proceedings of the 1st Workshop on Multimodal Models for Low-Resource Contexts and Social Impact (MMLoSo 2025)",
month = dec,
year = "2025",
address = "Mumbai, India",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.mmloso-1.10/",
pages = "101--105",
ISBN = "979-8-89176-311-1",
abstract = "Low-resource machine translation for Indic languages remains challenging, especially when high-resource languages such as Hindi and English must be translated to and from very low-resource, grammatically rich languages like Bhili, Mundari, Santali, and Gondi. We describe our winning system for the MMLoSo 2025 Shared Task in this setting. We start from a strong pretrained Indic MT backbone, IndicTrans2, and fine-tune it jointly on all translation directions, pushing the model close to memorization under strict data constraints. On top of this backbone, we add direction-specific low-rank adapters (LoRA) that allow each language pair to specialize while still sharing most parameters. At inference time, we further couple these directional adapters through a noisy-channel objective, in which forward and reverse models jointly score a set of candidate translations, encouraging outputs that are both fluent in the target language and informative about the source. This combination of shared pretraining, directional parameter-efficient adaptation, and noisy-channel reranking substantially improves over a strong fine-tuned baseline and achieves the top overall score on the shared-task leaderboard. We release our codebase at https://github.com/SajayR/LoRA-in-All-Directions"
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="raj-2025-challenge">
<titleInfo>
<title>Challenge Track: LoRAs in All Directions: Directional Adapters and Noisy-Channel Reranking for Indic MT</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sajay</namePart>
<namePart type="family">Raj</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 1st Workshop on Multimodal Models for Low-Resource Contexts and Social Impact (MMLoSo 2025)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ankita</namePart>
<namePart type="family">Shukla</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sandeep</namePart>
<namePart type="family">Kumar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Amrit</namePart>
<namePart type="given">Singh</namePart>
<namePart type="family">Bedi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tanmoy</namePart>
<namePart type="family">Chakraborty</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Mumbai, India</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-311-1</identifier>
</relatedItem>
<abstract>Low-resource machine translation for Indic languages remains challenging, especially when high-resource languages such as Hindi and English must be translated to and from very low-resource, grammatically rich languages like Bhili, Mundari, Santali, and Gondi. We describe our winning system for the MMLoSo 2025 Shared Task in this setting. We start from a strong pretrained Indic MT backbone, IndicTrans2, and fine-tune it jointly on all translation directions, pushing the model close to memorization under strict data constraints. On top of this backbone, we add direction-specific low-rank adapters (LoRA) that allow each language pair to specialize while still sharing most parameters. At inference time, we further couple these directional adapters through a noisy-channel objective, in which forward and reverse models jointly score a set of candidate translations, encouraging outputs that are both fluent in the target language and informative about the source. This combination of shared pretraining, directional parameter-efficient adaptation, and noisy-channel reranking substantially improves over a strong fine-tuned baseline and achieves the top overall score on the shared-task leaderboard. We release our codebase at https://github.com/SajayR/LoRA-in-All-Directions</abstract>
<identifier type="citekey">raj-2025-challenge</identifier>
<location>
<url>https://aclanthology.org/2025.mmloso-1.10/</url>
</location>
<part>
<date>2025-12</date>
<extent unit="page">
<start>101</start>
<end>105</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Challenge Track: LoRAs in All Directions: Directional Adapters and Noisy-Channel Reranking for Indic MT
%A Raj, Sajay
%Y Shukla, Ankita
%Y Kumar, Sandeep
%Y Bedi, Amrit Singh
%Y Chakraborty, Tanmoy
%S Proceedings of the 1st Workshop on Multimodal Models for Low-Resource Contexts and Social Impact (MMLoSo 2025)
%D 2025
%8 December
%I Association for Computational Linguistics
%C Mumbai, India
%@ 979-8-89176-311-1
%F raj-2025-challenge
%X Low-resource machine translation for Indic languages remains challenging, especially when high-resource languages such as Hindi and English must be translated to and from very low-resource, grammatically rich languages like Bhili, Mundari, Santali, and Gondi. We describe our winning system for the MMLoSo 2025 Shared Task in this setting. We start from a strong pretrained Indic MT backbone, IndicTrans2, and fine-tune it jointly on all translation directions, pushing the model close to memorization under strict data constraints. On top of this backbone, we add direction-specific low-rank adapters (LoRA) that allow each language pair to specialize while still sharing most parameters. At inference time, we further couple these directional adapters through a noisy-channel objective, in which forward and reverse models jointly score a set of candidate translations, encouraging outputs that are both fluent in the target language and informative about the source. This combination of shared pretraining, directional parameter-efficient adaptation, and noisy-channel reranking substantially improves over a strong fine-tuned baseline and achieves the top overall score on the shared-task leaderboard. We release our codebase at https://github.com/SajayR/LoRA-in-All-Directions
%U https://aclanthology.org/2025.mmloso-1.10/
%P 101-105
Markdown (Informal)
[Challenge Track: LoRAs in All Directions: Directional Adapters and Noisy-Channel Reranking for Indic MT](https://aclanthology.org/2025.mmloso-1.10/) (Raj, MMLoSo 2025)
ACL