@inproceedings{pannitto-herbelot-2020-recurrent,
title = "Recurrent babbling: evaluating the acquisition of grammar from limited input data",
author = "Pannitto, Ludovica and
Herbelot, Aur{\'e}lie",
editor = "Fern{\'a}ndez, Raquel and
Linzen, Tal",
booktitle = "Proceedings of the 24th Conference on Computational Natural Language Learning",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.conll-1.13",
doi = "10.18653/v1/2020.conll-1.13",
pages = "165--176",
abstract = "Recurrent Neural Networks (RNNs) have been shown to capture various aspects of syntax from raw linguistic input. In most previous experiments, however, learning happens over unrealistic corpora, which do not reflect the type and amount of data a child would be exposed to. This paper remedies this state of affairs by training an LSTM over a realistically sized subset of child-directed input. The behaviour of the network is analysed over time using a novel methodology which consists in quantifying the level of grammatical abstraction in the model{'}s generated output (its {`}babbling{'}), compared to the language it has been exposed to. We show that the LSTM indeed abstracts new structures as learning proceeds.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="pannitto-herbelot-2020-recurrent">
    <titleInfo>
      <title>Recurrent babbling: evaluating the acquisition of grammar from limited input data</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Ludovica</namePart>
      <namePart type="family">Pannitto</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Aurélie</namePart>
      <namePart type="family">Herbelot</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 24th Conference on Computational Natural Language Learning</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Raquel</namePart>
        <namePart type="family">Fernández</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Tal</namePart>
        <namePart type="family">Linzen</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Recurrent Neural Networks (RNNs) have been shown to capture various aspects of syntax from raw linguistic input. In most previous experiments, however, learning happens over unrealistic corpora, which do not reflect the type and amount of data a child would be exposed to. This paper remedies this state of affairs by training an LSTM over a realistically sized subset of child-directed input. The behaviour of the network is analysed over time using a novel methodology which consists in quantifying the level of grammatical abstraction in the model’s generated output (its ‘babbling’), compared to the language it has been exposed to. We show that the LSTM indeed abstracts new structures as learning proceeds.</abstract>
    <identifier type="citekey">pannitto-herbelot-2020-recurrent</identifier>
    <identifier type="doi">10.18653/v1/2020.conll-1.13</identifier>
    <location>
      <url>https://aclanthology.org/2020.conll-1.13</url>
    </location>
    <part>
      <date>2020-11</date>
      <extent unit="page">
        <start>165</start>
        <end>176</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Recurrent babbling: evaluating the acquisition of grammar from limited input data
%A Pannitto, Ludovica
%A Herbelot, Aurélie
%Y Fernández, Raquel
%Y Linzen, Tal
%S Proceedings of the 24th Conference on Computational Natural Language Learning
%D 2020
%8 November
%I Association for Computational Linguistics
%C Online
%F pannitto-herbelot-2020-recurrent
%X Recurrent Neural Networks (RNNs) have been shown to capture various aspects of syntax from raw linguistic input. In most previous experiments, however, learning happens over unrealistic corpora, which do not reflect the type and amount of data a child would be exposed to. This paper remedies this state of affairs by training an LSTM over a realistically sized subset of child-directed input. The behaviour of the network is analysed over time using a novel methodology which consists in quantifying the level of grammatical abstraction in the model’s generated output (its ‘babbling’), compared to the language it has been exposed to. We show that the LSTM indeed abstracts new structures as learning proceeds.
%R 10.18653/v1/2020.conll-1.13
%U https://aclanthology.org/2020.conll-1.13
%U https://doi.org/10.18653/v1/2020.conll-1.13
%P 165-176
Markdown (Informal)
[Recurrent babbling: evaluating the acquisition of grammar from limited input data](https://aclanthology.org/2020.conll-1.13) (Pannitto & Herbelot, CoNLL 2020)
ACL
Ludovica Pannitto and Aurélie Herbelot. 2020. Recurrent babbling: evaluating the acquisition of grammar from limited input data. In Proceedings of the 24th Conference on Computational Natural Language Learning, pages 165–176, Online. Association for Computational Linguistics.