@article{azpiazu-pera-2019-multiattentive,
title = "Multiattentive Recurrent Neural Network Architecture for Multilingual Readability Assessment",
author = "Azpiazu, Ion Madrazo and
Pera, Maria Soledad",
editor = "Lee, Lillian and
Johnson, Mark and
Roark, Brian and
Nenkova, Ani",
journal = "Transactions of the Association for Computational Linguistics",
volume = "7",
year = "2019",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://aclanthology.org/Q19-1028",
doi = "10.1162/tacl_a_00278",
pages = "421--436",
abstract = "We present a multiattentive recurrent neural network architecture for automatic multilingual readability assessment. This architecture considers raw words as its main input, but internally captures text structure and informs its word attention process using other syntax- and morphology-related datapoints, known to be of great importance to readability. This is achieved by a multiattentive strategy that allows the neural network to focus on specific parts of a text for predicting its reading level. We conducted an exhaustive evaluation using data sets targeting multiple languages and prediction task types, to compare the proposed model with traditional, state-of-the-art, and other neural network strategies.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="azpiazu-pera-2019-multiattentive">
<titleInfo>
<title>Multiattentive Recurrent Neural Network Architecture for Multilingual Readability Assessment</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ion</namePart>
<namePart type="given">Madrazo</namePart>
<namePart type="family">Azpiazu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Maria</namePart>
<namePart type="given">Soledad</namePart>
<namePart type="family">Pera</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<genre authority="bibutilsgt">journal article</genre>
<relatedItem type="host">
<titleInfo>
<title>Transactions of the Association for Computational Linguistics</title>
</titleInfo>
<originInfo>
<issuance>continuing</issuance>
<publisher>MIT Press</publisher>
<place>
<placeTerm type="text">Cambridge, MA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">periodical</genre>
<genre authority="bibutilsgt">academic journal</genre>
</relatedItem>
<abstract>We present a multiattentive recurrent neural network architecture for automatic multilingual readability assessment. This architecture considers raw words as its main input, but internally captures text structure and informs its word attention process using other syntax- and morphology-related datapoints, known to be of great importance to readability. This is achieved by a multiattentive strategy that allows the neural network to focus on specific parts of a text for predicting its reading level. We conducted an exhaustive evaluation using data sets targeting multiple languages and prediction task types, to compare the proposed model with traditional, state-of-the-art, and other neural network strategies.</abstract>
<identifier type="citekey">azpiazu-pera-2019-multiattentive</identifier>
<identifier type="doi">10.1162/tacl_a_00278</identifier>
<location>
<url>https://aclanthology.org/Q19-1028</url>
</location>
<part>
<date>2019</date>
<detail type="volume"><number>7</number></detail>
<extent unit="page">
<start>421</start>
<end>436</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Journal Article
%T Multiattentive Recurrent Neural Network Architecture for Multilingual Readability Assessment
%A Azpiazu, Ion Madrazo
%A Pera, Maria Soledad
%J Transactions of the Association for Computational Linguistics
%D 2019
%V 7
%I MIT Press
%C Cambridge, MA
%F azpiazu-pera-2019-multiattentive
%X We present a multiattentive recurrent neural network architecture for automatic multilingual readability assessment. This architecture considers raw words as its main input, but internally captures text structure and informs its word attention process using other syntax- and morphology-related datapoints, known to be of great importance to readability. This is achieved by a multiattentive strategy that allows the neural network to focus on specific parts of a text for predicting its reading level. We conducted an exhaustive evaluation using data sets targeting multiple languages and prediction task types, to compare the proposed model with traditional, state-of-the-art, and other neural network strategies.
%R 10.1162/tacl_a_00278
%U https://aclanthology.org/Q19-1028
%U https://doi.org/10.1162/tacl_a_00278
%P 421-436
Markdown (Informal)
[Multiattentive Recurrent Neural Network Architecture for Multilingual Readability Assessment](https://aclanthology.org/Q19-1028) (Azpiazu & Pera, TACL 2019)
ACL
Ion Madrazo Azpiazu and Maria Soledad Pera. 2019. [Multiattentive Recurrent Neural Network Architecture for Multilingual Readability Assessment](https://aclanthology.org/Q19-1028). *Transactions of the Association for Computational Linguistics*, 7:421–436.