@inproceedings{wu-hou-2025-efficient,
title = "An Efficient Retrieval-Based Method for Tabular Prediction with {LLM}",
author = "Wu, Jie and
Hou, Mengshu",
editor = "Rambow, Owen and
Wanner, Leo and
Apidianaki, Marianna and
Al-Khalifa, Hend and
Eugenio, Barbara Di and
Schockaert, Steven",
booktitle = "Proceedings of the 31st International Conference on Computational Linguistics",
month = jan,
year = "2025",
address = "Abu Dhabi, UAE",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.coling-main.663/",
pages = "9917--9925",
abstract = "Tabular prediction, a well-established problem in machine learning, has consistently garnered significant research attention within academia and industry. Recently, with the rapid development of large language models (LLMs), there has been increasing exploration of how to apply LLMs to tabular prediction tasks. Many existing methods, however, typically rely on extensive pre-training or fine-tuning of LLMs, which demands considerable computational resources. To avoid this, we propose a retrieval-based approach that utilizes the powerful capabilities of LLMs in representation, comprehension, and inference. Our approach eliminates the need for training any modules or performing data augmentation, depending solely on information from the target dataset. Experimental results reveal that, even without specialized training for tabular data, our method exhibits strong predictive performance on the tabular prediction task, affirming its practicality and effectiveness."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="wu-hou-2025-efficient">
<titleInfo>
<title>An Efficient Retrieval-Based Method for Tabular Prediction with LLM</title>
</titleInfo>
<name type="personal">
<namePart type="given">Jie</namePart>
<namePart type="family">Wu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mengshu</namePart>
<namePart type="family">Hou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-01</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 31st International Conference on Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Owen</namePart>
<namePart type="family">Rambow</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Leo</namePart>
<namePart type="family">Wanner</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marianna</namePart>
<namePart type="family">Apidianaki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hend</namePart>
<namePart type="family">Al-Khalifa</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Barbara</namePart>
<namePart type="given">Di</namePart>
<namePart type="family">Eugenio</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Steven</namePart>
<namePart type="family">Schockaert</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Abu Dhabi, UAE</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Tabular prediction, a well-established problem in machine learning, has consistently garnered significant research attention within academia and industry. Recently, with the rapid development of large language models (LLMs), there has been increasing exploration of how to apply LLMs to tabular prediction tasks. Many existing methods, however, typically rely on extensive pre-training or fine-tuning of LLMs, which demands considerable computational resources. To avoid this, we propose a retrieval-based approach that utilizes the powerful capabilities of LLMs in representation, comprehension, and inference. Our approach eliminates the need for training any modules or performing data augmentation, depending solely on information from the target dataset. Experimental results reveal that, even without specialized training for tabular data, our method exhibits strong predictive performance on the tabular prediction task, affirming its practicality and effectiveness.</abstract>
<identifier type="citekey">wu-hou-2025-efficient</identifier>
<location>
<url>https://aclanthology.org/2025.coling-main.663/</url>
</location>
<part>
<date>2025-01</date>
<extent unit="page">
<start>9917</start>
<end>9925</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T An Efficient Retrieval-Based Method for Tabular Prediction with LLM
%A Wu, Jie
%A Hou, Mengshu
%Y Rambow, Owen
%Y Wanner, Leo
%Y Apidianaki, Marianna
%Y Al-Khalifa, Hend
%Y Eugenio, Barbara Di
%Y Schockaert, Steven
%S Proceedings of the 31st International Conference on Computational Linguistics
%D 2025
%8 January
%I Association for Computational Linguistics
%C Abu Dhabi, UAE
%F wu-hou-2025-efficient
%X Tabular prediction, a well-established problem in machine learning, has consistently garnered significant research attention within academia and industry. Recently, with the rapid development of large language models (LLMs), there has been increasing exploration of how to apply LLMs to tabular prediction tasks. Many existing methods, however, typically rely on extensive pre-training or fine-tuning of LLMs, which demands considerable computational resources. To avoid this, we propose a retrieval-based approach that utilizes the powerful capabilities of LLMs in representation, comprehension, and inference. Our approach eliminates the need for training any modules or performing data augmentation, depending solely on information from the target dataset. Experimental results reveal that, even without specialized training for tabular data, our method exhibits strong predictive performance on the tabular prediction task, affirming its practicality and effectiveness.
%U https://aclanthology.org/2025.coling-main.663/
%P 9917-9925
Markdown (Informal)
[An Efficient Retrieval-Based Method for Tabular Prediction with LLM](https://aclanthology.org/2025.coling-main.663/) (Wu & Hou, COLING 2025)
ACL