@inproceedings{koshil-etal-2025-context,
  title     = {In-Context Learning of Soft Nearest Neighbor Classifiers for Intelligible Tabular Machine Learning},
  author    = {Koshil, Mykhailo and
               Feurer, Matthias and
               Eggensperger, Katharina},
  editor    = {Chang, Shuaichen and
               Hulsebos, Madelon and
               Liu, Qian and
               Chen, Wenhu and
               Sun, Huan},
  booktitle = {Proceedings of the 4th Table Representation Learning Workshop},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.trl-1.15/},
  doi       = {10.18653/v1/2025.trl-1.15},
  pages     = {182--191},
  isbn      = {979-8-89176-268-8},
  abstract  = {With in-context learning foundation models like {TabPFN} excelling on small supervised tabular learning tasks, it has been argued that ``boosted trees are not the best default choice when working with data in tables''. However, such foundation models are inherently black-box models that do not provide interpretable predictions. We introduce a novel learning task to train {ICL} models to act as a nearest neighbor algorithm, which enables intelligible inference and does not decrease performance empirically.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="koshil-etal-2025-context">
<titleInfo>
<title>In-Context Learning of Soft Nearest Neighbor Classifiers for Intelligible Tabular Machine Learning</title>
</titleInfo>
<name type="personal">
<namePart type="given">Mykhailo</namePart>
<namePart type="family">Koshil</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Matthias</namePart>
<namePart type="family">Feurer</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Katharina</namePart>
<namePart type="family">Eggensperger</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 4th Table Representation Learning Workshop</title>
</titleInfo>
<name type="personal">
<namePart type="given">Shuaichen</namePart>
<namePart type="family">Chang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Madelon</namePart>
<namePart type="family">Hulsebos</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Qian</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Wenhu</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Huan</namePart>
<namePart type="family">Sun</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Vienna, Austria</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-268-8</identifier>
</relatedItem>
<abstract>With in-context learning foundation models like TabPFN excelling on small supervised tabular learning tasks, it has been argued that “boosted trees are not the best default choice when working with data in tables”. However, such foundation models are inherently black-box models that do not provide interpretable predictions. We introduce a novel learning task to train ICL models to act as a nearest neighbor algorithm, which enables intelligible inference and does not decrease performance empirically.</abstract>
<identifier type="citekey">koshil-etal-2025-context</identifier>
<identifier type="doi">10.18653/v1/2025.trl-1.15</identifier>
<location>
<url>https://aclanthology.org/2025.trl-1.15/</url>
</location>
<part>
<date>2025-07</date>
<extent unit="page">
<start>182</start>
<end>191</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T In-Context Learning of Soft Nearest Neighbor Classifiers for Intelligible Tabular Machine Learning
%A Koshil, Mykhailo
%A Feurer, Matthias
%A Eggensperger, Katharina
%Y Chang, Shuaichen
%Y Hulsebos, Madelon
%Y Liu, Qian
%Y Chen, Wenhu
%Y Sun, Huan
%S Proceedings of the 4th Table Representation Learning Workshop
%D 2025
%8 July
%I Association for Computational Linguistics
%C Vienna, Austria
%@ 979-8-89176-268-8
%F koshil-etal-2025-context
%X With in-context learning foundation models like TabPFN excelling on small supervised tabular learning tasks, it has been argued that “boosted trees are not the best default choice when working with data in tables”. However, such foundation models are inherently black-box models that do not provide interpretable predictions. We introduce a novel learning task to train ICL models to act as a nearest neighbor algorithm, which enables intelligible inference and does not decrease performance empirically.
%R 10.18653/v1/2025.trl-1.15
%U https://aclanthology.org/2025.trl-1.15/
%U https://doi.org/10.18653/v1/2025.trl-1.15
%P 182-191
Markdown (Informal)
[In-Context Learning of Soft Nearest Neighbor Classifiers for Intelligible Tabular Machine Learning](https://aclanthology.org/2025.trl-1.15/) (Koshil et al., TRL 2025)
ACL