@inproceedings{wu-li-2024-insertgnn,
title = "{I}nsert{GNN}: A Hierarchical Graph Neural Network for the {TOEFL} Sentence Insertion Problem",
author = "Wu, Fang and
Li, Stan",
editor = "Al-Onaizan, Yaser and
Bansal, Mohit and
Chen, Yun-Nung",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2024",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.findings-emnlp.9",
pages = "173--180",
abstract = "The integration of sentences poses an intriguing challenge within the realm of NLP, but it has not garnered the attention it deserves. Existing methods that focus on sentence arrangement, textual consistency, and question answering have been shown to be inadequate in addressing this issue. To bridge this gap, we introduce InsertGNN which conceptualizes the problem as a graph and employ a hierarchical Graph Neural Network (GNN) to comprehend the interplay between sentences. Our approach was rigorously evaluated on a TOEFL dataset, and its efficacy was further validated on the expansive arXiv dataset using cross-domain learning. Thorough experimentation unequivocally establishes InsertGNN{'}s superiority over all comparative benchmarks, achieving an impressive 70{\%} accuracy{---}a performance on par with average human test scores.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="wu-li-2024-insertgnn">
<titleInfo>
<title>InsertGNN: A Hierarchical Graph Neural Network for the TOEFL Sentence Insertion Problem</title>
</titleInfo>
<name type="personal">
<namePart type="given">Fang</namePart>
<namePart type="family">Wu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Stan</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2024-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Findings of the Association for Computational Linguistics: EMNLP 2024</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yaser</namePart>
<namePart type="family">Al-Onaizan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mohit</namePart>
<namePart type="family">Bansal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yun-Nung</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Miami, Florida, USA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>The integration of sentences poses an intriguing challenge within the realm of NLP, but it has not garnered the attention it deserves. Existing methods that focus on sentence arrangement, textual consistency, and question answering have been shown to be inadequate in addressing this issue. To bridge this gap, we introduce InsertGNN which conceptualizes the problem as a graph and employ a hierarchical Graph Neural Network (GNN) to comprehend the interplay between sentences. Our approach was rigorously evaluated on a TOEFL dataset, and its efficacy was further validated on the expansive arXiv dataset using cross-domain learning. Thorough experimentation unequivocally establishes InsertGNN’s superiority over all comparative benchmarks, achieving an impressive 70% accuracy—a performance on par with average human test scores.</abstract>
<identifier type="citekey">wu-li-2024-insertgnn</identifier>
<location>
<url>https://aclanthology.org/2024.findings-emnlp.9</url>
</location>
<part>
<date>2024-11</date>
<extent unit="page">
<start>173</start>
<end>180</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T InsertGNN: A Hierarchical Graph Neural Network for the TOEFL Sentence Insertion Problem
%A Wu, Fang
%A Li, Stan
%Y Al-Onaizan, Yaser
%Y Bansal, Mohit
%Y Chen, Yun-Nung
%S Findings of the Association for Computational Linguistics: EMNLP 2024
%D 2024
%8 November
%I Association for Computational Linguistics
%C Miami, Florida, USA
%F wu-li-2024-insertgnn
%X The integration of sentences poses an intriguing challenge within the realm of NLP, but it has not garnered the attention it deserves. Existing methods that focus on sentence arrangement, textual consistency, and question answering have been shown to be inadequate in addressing this issue. To bridge this gap, we introduce InsertGNN which conceptualizes the problem as a graph and employ a hierarchical Graph Neural Network (GNN) to comprehend the interplay between sentences. Our approach was rigorously evaluated on a TOEFL dataset, and its efficacy was further validated on the expansive arXiv dataset using cross-domain learning. Thorough experimentation unequivocally establishes InsertGNN’s superiority over all comparative benchmarks, achieving an impressive 70% accuracy—a performance on par with average human test scores.
%U https://aclanthology.org/2024.findings-emnlp.9
%P 173-180
Markdown (Informal)
[InsertGNN: A Hierarchical Graph Neural Network for the TOEFL Sentence Insertion Problem](https://aclanthology.org/2024.findings-emnlp.9) (Wu & Li, Findings 2024)
ACL
Fang Wu and Stan Li. 2024. InsertGNN: A Hierarchical Graph Neural Network for the TOEFL Sentence Insertion Problem. In Findings of the Association for Computational Linguistics: EMNLP 2024, pages 173–180, Miami, Florida, USA. Association for Computational Linguistics.