BibTeX
@article{kirov-cotterell-2018-recurrent,
    title = "Recurrent Neural Networks in Linguistic Theory: Revisiting {P}inker and {P}rince (1988) and the Past Tense Debate",
    author = "Kirov, Christo and
      Cotterell, Ryan",
    editor = "Lee, Lillian and
      Johnson, Mark and
      Toutanova, Kristina and
      Roark, Brian",
    journal = "Transactions of the Association for Computational Linguistics",
    volume = "6",
    year = "2018",
    address = "Cambridge, MA",
    publisher = "MIT Press",
    url = "https://aclanthology.org/Q18-1045",
    doi = "10.1162/tacl_a_00247",
    pages = "651--665",
    abstract = "Can advances in NLP help advance cognitive modeling? We examine the role of artificial neural networks, the current state of the art in many common NLP tasks, by returning to a classic case study. In 1986, Rumelhart and McClelland famously introduced a neural architecture that learned to transduce English verb stems to their past tense forms. Shortly thereafter in 1988, Pinker and Prince presented a comprehensive rebuttal of many of Rumelhart and McClelland{'}s claims. Much of the force of their attack centered on the empirical inadequacy of the Rumelhart and McClelland model. Today, however, that model is severely outmoded. We show that the Encoder-Decoder network architectures used in modern NLP systems obviate most of Pinker and Prince{'}s criticisms without requiring any simplification of the past tense mapping problem. We suggest that the empirical performance of modern networks warrants a reexamination of their utility in linguistic and cognitive modeling.",
}
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kirov-cotterell-2018-recurrent">
    <titleInfo>
        <title>Recurrent Neural Networks in Linguistic Theory: Revisiting Pinker and Prince (1988) and the Past Tense Debate</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Christo</namePart>
        <namePart type="family">Kirov</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Ryan</namePart>
        <namePart type="family">Cotterell</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2018</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <genre authority="bibutilsgt">journal article</genre>
    <relatedItem type="host">
        <titleInfo>
            <title>Transactions of the Association for Computational Linguistics</title>
        </titleInfo>
        <originInfo>
            <issuance>continuing</issuance>
            <publisher>MIT Press</publisher>
            <place>
                <placeTerm type="text">Cambridge, MA</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">periodical</genre>
        <genre authority="bibutilsgt">academic journal</genre>
    </relatedItem>
    <abstract>Can advances in NLP help advance cognitive modeling? We examine the role of artificial neural networks, the current state of the art in many common NLP tasks, by returning to a classic case study. In 1986, Rumelhart and McClelland famously introduced a neural architecture that learned to transduce English verb stems to their past tense forms. Shortly thereafter in 1988, Pinker and Prince presented a comprehensive rebuttal of many of Rumelhart and McClelland’s claims. Much of the force of their attack centered on the empirical inadequacy of the Rumelhart and McClelland model. Today, however, that model is severely outmoded. We show that the Encoder-Decoder network architectures used in modern NLP systems obviate most of Pinker and Prince’s criticisms without requiring any simplification of the past tense mapping problem. We suggest that the empirical performance of modern networks warrants a reexamination of their utility in linguistic and cognitive modeling.</abstract>
    <identifier type="citekey">kirov-cotterell-2018-recurrent</identifier>
    <identifier type="doi">10.1162/tacl_a_00247</identifier>
    <location>
        <url>https://aclanthology.org/Q18-1045</url>
    </location>
    <part>
        <date>2018</date>
        <detail type="volume"><number>6</number></detail>
        <extent unit="page">
            <start>651</start>
            <end>665</end>
        </extent>
    </part>
</mods>
</modsCollection>
Endnote
%0 Journal Article
%T Recurrent Neural Networks in Linguistic Theory: Revisiting Pinker and Prince (1988) and the Past Tense Debate
%A Kirov, Christo
%A Cotterell, Ryan
%J Transactions of the Association for Computational Linguistics
%D 2018
%V 6
%I MIT Press
%C Cambridge, MA
%F kirov-cotterell-2018-recurrent
%X Can advances in NLP help advance cognitive modeling? We examine the role of artificial neural networks, the current state of the art in many common NLP tasks, by returning to a classic case study. In 1986, Rumelhart and McClelland famously introduced a neural architecture that learned to transduce English verb stems to their past tense forms. Shortly thereafter in 1988, Pinker and Prince presented a comprehensive rebuttal of many of Rumelhart and McClelland’s claims. Much of the force of their attack centered on the empirical inadequacy of the Rumelhart and McClelland model. Today, however, that model is severely outmoded. We show that the Encoder-Decoder network architectures used in modern NLP systems obviate most of Pinker and Prince’s criticisms without requiring any simplification of the past tense mapping problem. We suggest that the empirical performance of modern networks warrants a reexamination of their utility in linguistic and cognitive modeling.
%R 10.1162/tacl_a_00247
%U https://aclanthology.org/Q18-1045
%U https://doi.org/10.1162/tacl_a_00247
%P 651-665
Markdown (Informal)
[Recurrent Neural Networks in Linguistic Theory: Revisiting Pinker and Prince (1988) and the Past Tense Debate](https://aclanthology.org/Q18-1045) (Kirov & Cotterell, TACL 2018)
ACL
Christo Kirov and Ryan Cotterell. 2018. [Recurrent Neural Networks in Linguistic Theory: Revisiting Pinker and Prince (1988) and the Past Tense Debate](https://aclanthology.org/Q18-1045). Transactions of the Association for Computational Linguistics, 6:651–665.