@inproceedings{benedetto-buttery-2025-towards,
title = "Towards {CEFR}-targeted Text Simplification for Question Adaptation",
author = "Benedetto, Luca and
Buttery, Paula",
editor = "Angelova, Galia and
Kunilovskaya, Maria and
Escribe, Marie and
Mitkov, Ruslan",
booktitle = "Proceedings of the 15th International Conference on Recent Advances in Natural Language Processing - Natural Language Processing in the Generative AI Era",
month = sep,
year = "2025",
address = "Varna, Bulgaria",
publisher = "INCOMA Ltd., Shoumen, Bulgaria",
url = "https://aclanthology.org/2025.ranlp-1.18/",
pages = "150--157",
abstract = "Text Simplification (TS) can adapt educational content to learners' proficiency levels. In reading comprehension questions, passage complexity directly affects the question difficulty; thus, TS could enable automatic question adaptation by generating multiple versions of a reading passage. However, despite the potential of TS and its applications in other domains, the feasibility, reliability, and robustness of TS for question adaptation remains unexplored. In this paper, we conduct the first evaluation of LLMs for CEFR targeted text simplification aimed at question adaptation. Specifically, we investigate whether LLMs can perform CEFR-targeted text simplification and how this affects question answerability. Evaluating four LLMs on two English learning datasets, we show that they can mostly perform targeted simplification with readability values correlating with reference CEFR levels, but alignment is imperfect. Crucially, the simplified texts generally preserve the information needed to for question answering, and questions associated with texts simplified at lower levels show reduced difficulty in virtual pretesting. These preliminary findings show the potential of LLMs for educational content adaptation, but practical deployment will need improved CEFR alignment."
}<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="benedetto-buttery-2025-towards">
<titleInfo>
<title>Towards CEFR-targeted Text Simplification for Question Adaptation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Luca</namePart>
<namePart type="family">Benedetto</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Paula</namePart>
<namePart type="family">Buttery</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 15th International Conference on Recent Advances in Natural Language Processing - Natural Language Processing in the Generative AI Era</title>
</titleInfo>
<name type="personal">
<namePart type="given">Galia</namePart>
<namePart type="family">Angelova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Maria</namePart>
<namePart type="family">Kunilovskaya</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marie</namePart>
<namePart type="family">Escribe</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ruslan</namePart>
<namePart type="family">Mitkov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>INCOMA Ltd., Shoumen, Bulgaria</publisher>
<place>
<placeTerm type="text">Varna, Bulgaria</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Text Simplification (TS) can adapt educational content to learners’ proficiency levels. In reading comprehension questions, passage complexity directly affects the question difficulty; thus, TS could enable automatic question adaptation by generating multiple versions of a reading passage. However, despite the potential of TS and its applications in other domains, the feasibility, reliability, and robustness of TS for question adaptation remains unexplored. In this paper, we conduct the first evaluation of LLMs for CEFR targeted text simplification aimed at question adaptation. Specifically, we investigate whether LLMs can perform CEFR-targeted text simplification and how this affects question answerability. Evaluating four LLMs on two English learning datasets, we show that they can mostly perform targeted simplification with readability values correlating with reference CEFR levels, but alignment is imperfect. Crucially, the simplified texts generally preserve the information needed to for question answering, and questions associated with texts simplified at lower levels show reduced difficulty in virtual pretesting. These preliminary findings show the potential of LLMs for educational content adaptation, but practical deployment will need improved CEFR alignment.</abstract>
<identifier type="citekey">benedetto-buttery-2025-towards</identifier>
<location>
<url>https://aclanthology.org/2025.ranlp-1.18/</url>
</location>
<part>
<date>2025-09</date>
<extent unit="page">
<start>150</start>
<end>157</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Towards CEFR-targeted Text Simplification for Question Adaptation
%A Benedetto, Luca
%A Buttery, Paula
%Y Angelova, Galia
%Y Kunilovskaya, Maria
%Y Escribe, Marie
%Y Mitkov, Ruslan
%S Proceedings of the 15th International Conference on Recent Advances in Natural Language Processing - Natural Language Processing in the Generative AI Era
%D 2025
%8 September
%I INCOMA Ltd., Shoumen, Bulgaria
%C Varna, Bulgaria
%F benedetto-buttery-2025-towards
%X Text Simplification (TS) can adapt educational content to learners’ proficiency levels. In reading comprehension questions, passage complexity directly affects question difficulty; thus, TS could enable automatic question adaptation by generating multiple versions of a reading passage. However, despite the potential of TS and its applications in other domains, the feasibility, reliability, and robustness of TS for question adaptation remain unexplored. In this paper, we conduct the first evaluation of LLMs for CEFR-targeted text simplification aimed at question adaptation. Specifically, we investigate whether LLMs can perform CEFR-targeted text simplification and how this affects question answerability. Evaluating four LLMs on two English learning datasets, we show that they can mostly perform targeted simplification, with readability values correlating with reference CEFR levels, but alignment is imperfect. Crucially, the simplified texts generally preserve the information needed for question answering, and questions associated with texts simplified at lower levels show reduced difficulty in virtual pretesting. These preliminary findings show the potential of LLMs for educational content adaptation, but practical deployment will need improved CEFR alignment.
%U https://aclanthology.org/2025.ranlp-1.18/
%P 150-157
Markdown (Informal)
[Towards CEFR-targeted Text Simplification for Question Adaptation](https://aclanthology.org/2025.ranlp-1.18/) (Benedetto & Buttery, RANLP 2025)
ACL
Luca Benedetto and Paula Buttery. 2025. Towards CEFR-targeted Text Simplification for Question Adaptation. In Proceedings of the 15th International Conference on Recent Advances in Natural Language Processing - Natural Language Processing in the Generative AI Era, pages 150–157, Varna, Bulgaria. INCOMA Ltd., Shoumen, Bulgaria.