@inproceedings{luo-etal-2025-large-language,
  title     = "How Do Large Language Models Perform on {PDE} Discovery: A Coarse-to-fine Perspective",
  author    = "Luo, Xiao and
               Wang, Changhu and
               Sun, Yizhou and
               Wang, Wei",
  editor    = "Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet",
  booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
  month     = nov,
  year      = "2025",
  address   = "Suzhou, China",
  publisher = "Association for Computational Linguistics",
  url       = "https://aclanthology.org/2025.findings-emnlp.145/",
  pages     = "2684--2697",
  isbn      = "979-8-89176-335-7",
  abstract  = "This paper studies the problem of how to use large language models (LLMs) to identify the underlying partial differential equations (PDEs) out of very limited observations of a physical system. Previous methods usually utilize physical-informed neural networks (PINNs) to learn the PDE solver and coefficient of PDEs simultaneously, which could suffer from performance degradation under extreme data scarcity. Towards this end, this paper attempts to utilize LLMs to solve this problem without further fine-tuning by proposing a novel framework named LLM for PDE Discovery (LLM4PD). The core of our LLM4PD is to utilize a coarse-to-fine paradigm to automatically discover underlying PDEs. In the coarse phase, LLM4PD selects the crucial terms from a library with hierarchical prompts and incorporates a review agent to enhance the accuracy. In the fine phase, LLM4PD interacts with a PDE solver to optimize the coefficient of the selected terms with the optimization trajectory. We also provide an adaptive hybrid optimization strategy switching between fine-tuning and exploration to balance stability and efficiency. Extensive experiments on several systems validate the effectiveness of our proposed LLM4PD in different settings.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="luo-etal-2025-large-language">
<titleInfo>
<title>How Do Large Language Models Perform on PDE Discovery: A Coarse-to-fine Perspective</title>
</titleInfo>
<name type="personal">
<namePart type="given">Xiao</namePart>
<namePart type="family">Luo</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Changhu</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yizhou</namePart>
<namePart type="family">Sun</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Wei</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Findings of the Association for Computational Linguistics: EMNLP 2025</title>
</titleInfo>
<name type="personal">
<namePart type="given">Christos</namePart>
<namePart type="family">Christodoulopoulos</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tanmoy</namePart>
<namePart type="family">Chakraborty</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Carolyn</namePart>
<namePart type="family">Rose</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Violet</namePart>
<namePart type="family">Peng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Suzhou, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-335-7</identifier>
</relatedItem>
<abstract>This paper studies the problem of how to use large language models (LLMs) to identify the underlying partial differential equations (PDEs) out of very limited observations of a physical system. Previous methods usually utilize physical-informed neural networks (PINNs) to learn the PDE solver and coefficient of PDEs simultaneously, which could suffer from performance degradation under extreme data scarcity. Towards this end, this paper attempts to utilize LLMs to solve this problem without further fine-tuning by proposing a novel framework named LLM for PDE Discovery (LLM4PD). The core of our LLM4PD is to utilize a coarse-to-fine paradigm to automatically discover underlying PDEs. In the coarse phase, LLM4PD selects the crucial terms from a library with hierarchical prompts and incorporates a review agent to enhance the accuracy. In the fine phase, LLM4PD interacts with a PDE solver to optimize the coefficient of the selected terms with the optimization trajectory. We also provide an adaptive hybrid optimization strategy switching between fine-tuning and exploration to balance stability and efficiency. Extensive experiments on several systems validate the effectiveness of our proposed LLM4PD in different settings.</abstract>
<identifier type="citekey">luo-etal-2025-large-language</identifier>
<location>
<url>https://aclanthology.org/2025.findings-emnlp.145/</url>
</location>
<part>
<date>2025-11</date>
<extent unit="page">
<start>2684</start>
<end>2697</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T How Do Large Language Models Perform on PDE Discovery: A Coarse-to-fine Perspective
%A Luo, Xiao
%A Wang, Changhu
%A Sun, Yizhou
%A Wang, Wei
%Y Christodoulopoulos, Christos
%Y Chakraborty, Tanmoy
%Y Rose, Carolyn
%Y Peng, Violet
%S Findings of the Association for Computational Linguistics: EMNLP 2025
%D 2025
%8 November
%I Association for Computational Linguistics
%C Suzhou, China
%@ 979-8-89176-335-7
%F luo-etal-2025-large-language
%X This paper studies the problem of how to use large language models (LLMs) to identify the underlying partial differential equations (PDEs) out of very limited observations of a physical system. Previous methods usually utilize physical-informed neural networks (PINNs) to learn the PDE solver and coefficient of PDEs simultaneously, which could suffer from performance degradation under extreme data scarcity. Towards this end, this paper attempts to utilize LLMs to solve this problem without further fine-tuning by proposing a novel framework named LLM for PDE Discovery (LLM4PD). The core of our LLM4PD is to utilize a coarse-to-fine paradigm to automatically discover underlying PDEs. In the coarse phase, LLM4PD selects the crucial terms from a library with hierarchical prompts and incorporates a review agent to enhance the accuracy. In the fine phase, LLM4PD interacts with a PDE solver to optimize the coefficient of the selected terms with the optimization trajectory. We also provide an adaptive hybrid optimization strategy switching between fine-tuning and exploration to balance stability and efficiency. Extensive experiments on several systems validate the effectiveness of our proposed LLM4PD in different settings.
%U https://aclanthology.org/2025.findings-emnlp.145/
%P 2684-2697
Markdown (Informal)
[How Do Large Language Models Perform on PDE Discovery: A Coarse-to-fine Perspective](https://aclanthology.org/2025.findings-emnlp.145/) (Luo et al., Findings 2025)
ACL