@inproceedings{tokarchuk-etal-2025-angular,
title = "Angular Dispersion Accelerates $k$-Nearest Neighbors Machine Translation",
author = "Tokarchuk, Evgeniia and
Troshin, Sergey and
Niculae, Vlad",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.findings-emnlp.759/",
pages = "14120--14132",
ISBN = "979-8-89176-335-7",
abstract = "Augmenting neural machine translation with external memory at decoding time, in the form of $k$-nearest neighbors machine translation ($k$-NN MT), is a well-established strategy for increasing translation performance. $k$-NN MT retrieves a set of tokens that occurred in the most similar contexts recorded in a prepared data store, using hidden state representations of translation contexts as vector lookup keys. One of the main disadvantages of this method is the high computational cost and memory requirements. Since an exhaustive search is not feasible in large data stores practitioners commonly use approximate $k$-NN lookup, yet even such algorithms are a bottleneck. In contrast to research directions seeking to accelerate $k$-NN MT by reducing data store size or the number of lookup calls, we pursue an orthogonal direction based on the performance properties of approximate $k$-NN lookup data structures. In particular, we propose encouraging angular dispersion of the neural hidden representations of contexts. We show that improving dispersion leads to better balance in the retrieval data structures, accelerating retrieval and slightly improving translations."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="tokarchuk-etal-2025-angular">
<titleInfo>
<title>Angular Dispersion Accelerates k-Nearest Neighbors Machine Translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Evgeniia</namePart>
<namePart type="family">Tokarchuk</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sergey</namePart>
<namePart type="family">Troshin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Vlad</namePart>
<namePart type="family">Niculae</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Findings of the Association for Computational Linguistics: EMNLP 2025</title>
</titleInfo>
<name type="personal">
<namePart type="given">Christos</namePart>
<namePart type="family">Christodoulopoulos</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tanmoy</namePart>
<namePart type="family">Chakraborty</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Carolyn</namePart>
<namePart type="family">Rose</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Violet</namePart>
<namePart type="family">Peng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Suzhou, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-335-7</identifier>
</relatedItem>
<abstract>Augmenting neural machine translation with external memory at decoding time, in the form of k-nearest neighbors machine translation (k-NN MT), is a well-established strategy for increasing translation performance. k-NN MT retrieves a set of tokens that occurred in the most similar contexts recorded in a prepared data store, using hidden state representations of translation contexts as vector lookup keys. One of the main disadvantages of this method is the high computational cost and memory requirements. Since an exhaustive search is not feasible in large data stores, practitioners commonly use approximate k-NN lookup, yet even such algorithms are a bottleneck. In contrast to research directions seeking to accelerate k-NN MT by reducing data store size or the number of lookup calls, we pursue an orthogonal direction based on the performance properties of approximate k-NN lookup data structures. In particular, we propose encouraging angular dispersion of the neural hidden representations of contexts. We show that improving dispersion leads to better balance in the retrieval data structures, accelerating retrieval and slightly improving translations.</abstract>
<identifier type="citekey">tokarchuk-etal-2025-angular</identifier>
<location>
<url>https://aclanthology.org/2025.findings-emnlp.759/</url>
</location>
<part>
<date>2025-11</date>
<extent unit="page">
<start>14120</start>
<end>14132</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Angular Dispersion Accelerates k-Nearest Neighbors Machine Translation
%A Tokarchuk, Evgeniia
%A Troshin, Sergey
%A Niculae, Vlad
%Y Christodoulopoulos, Christos
%Y Chakraborty, Tanmoy
%Y Rose, Carolyn
%Y Peng, Violet
%S Findings of the Association for Computational Linguistics: EMNLP 2025
%D 2025
%8 November
%I Association for Computational Linguistics
%C Suzhou, China
%@ 979-8-89176-335-7
%F tokarchuk-etal-2025-angular
%X Augmenting neural machine translation with external memory at decoding time, in the form of k-nearest neighbors machine translation (k-NN MT), is a well-established strategy for increasing translation performance. k-NN MT retrieves a set of tokens that occurred in the most similar contexts recorded in a prepared data store, using hidden state representations of translation contexts as vector lookup keys. One of the main disadvantages of this method is the high computational cost and memory requirements. Since an exhaustive search is not feasible in large data stores, practitioners commonly use approximate k-NN lookup, yet even such algorithms are a bottleneck. In contrast to research directions seeking to accelerate k-NN MT by reducing data store size or the number of lookup calls, we pursue an orthogonal direction based on the performance properties of approximate k-NN lookup data structures. In particular, we propose encouraging angular dispersion of the neural hidden representations of contexts. We show that improving dispersion leads to better balance in the retrieval data structures, accelerating retrieval and slightly improving translations.
%U https://aclanthology.org/2025.findings-emnlp.759/
%P 14120-14132
Markdown (Informal)
[Angular Dispersion Accelerates k-Nearest Neighbors Machine Translation](https://aclanthology.org/2025.findings-emnlp.759/) (Tokarchuk et al., Findings 2025)
ACL
Evgeniia Tokarchuk, Sergey Troshin, and Vlad Niculae. 2025. Angular Dispersion Accelerates k-Nearest Neighbors Machine Translation. In Findings of the Association for Computational Linguistics: EMNLP 2025, pages 14120–14132, Suzhou, China. Association for Computational Linguistics.
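
For readers skimming this record, the abstract's core idea — encouraging angular dispersion of the hidden states used as k-NN lookup keys — can be sketched as a simple regularizer. The following PyTorch snippet is a hypothetical illustration only, not the authors' implementation: the function name, the squared-cosine penalty, and the 0.1 weight in the usage comment are all assumptions.

```python
# Hypothetical sketch (not the paper's code): one way to encourage angular
# dispersion of hidden context representations, per the abstract's idea.
import torch
import torch.nn.functional as F

def angular_dispersion_penalty(hidden: torch.Tensor) -> torch.Tensor:
    """Penalize pairwise cosine similarity among L2-normalized hidden states.

    hidden: (batch, dim) decoder hidden states used as k-NN lookup keys.
    A smaller penalty means the directions are more spread out on the unit
    sphere, which the abstract links to better-balanced approximate k-NN
    index structures and hence faster retrieval.
    """
    h = F.normalize(hidden, dim=-1)                       # project onto unit sphere
    cos = h @ h.T                                         # pairwise cosine similarities
    off_diag = cos - torch.eye(len(h), device=h.device)   # drop self-similarity
    return off_diag.pow(2).mean()                         # small when vectors disperse

# Usage (illustrative): add the penalty to the translation loss with a
# small weight, e.g.
#   loss = nll_loss + 0.1 * angular_dispersion_penalty(decoder_states)
```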