@inproceedings{calvillo-crocker-2018-language,
title = "Language Production Dynamics with Recurrent Neural Networks",
author = "Calvillo, Jes{\'u}s and
Crocker, Matthew",
editor = "Idiart, Marco and
Lenci, Alessandro and
Poibeau, Thierry and
Villavicencio, Aline",
    booktitle = "Proceedings of the Eighth Workshop on Cognitive Aspects of Computational Language Learning and Processing",
month = jul,
year = "2018",
address = "Melbourne",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W18-2803",
doi = "10.18653/v1/W18-2803",
pages = "17--26",
    abstract = "We present an analysis of the internal mechanism of the recurrent neural model of sentence production presented by Calvillo et al. (2016). The results show clear patterns of computation related to each layer in the network, allowing us to infer an algorithmic account, where the semantics activates the semantically related words, then each word generated at each time step activates syntactic and semantic constraints on possible continuations, while the recurrence preserves information through time. We propose that such insights could generalize to other models with similar architecture, including some used in computational linguistics for language modeling, machine translation and image caption generation.",
}
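The abstract describes a word-by-word production process: a semantic representation and the previously produced word feed a recurrent hidden layer, whose state constrains the possible continuations at each time step. The minimal sketch below (plain NumPy, toy vocabulary, random untrained weights; all names and dimensions are illustrative assumptions, not the architecture or code of Calvillo et al., 2016) shows the general shape of such an SRN-style generator.

# Illustrative sketch only: an SRN-style production network in which a fixed
# semantic vector plus the previously produced word drive a recurrent hidden
# layer that predicts a distribution over the next word at each time step.
import numpy as np

rng = np.random.default_rng(0)

VOCAB = ["<s>", "the", "woman", "plays", "chess", "inside", "</s>"]  # toy vocabulary
SEM_DIM, HID_DIM, V = 10, 16, len(VOCAB)

# Randomly initialized parameters stand in for trained weights.
W_sem = rng.normal(scale=0.1, size=(HID_DIM, SEM_DIM))   # semantics -> hidden
W_in  = rng.normal(scale=0.1, size=(HID_DIM, V))          # previous word -> hidden
W_rec = rng.normal(scale=0.1, size=(HID_DIM, HID_DIM))    # hidden -> hidden (recurrence)
W_out = rng.normal(scale=0.1, size=(V, HID_DIM))          # hidden -> word logits

def softmax(x):
    e = np.exp(x - x.max())
    return e / e.sum()

def produce(semantics, max_len=10):
    """Greedy word-by-word production conditioned on a semantic vector."""
    h = np.zeros(HID_DIM)                  # recurrent state carries information through time
    prev = np.eye(V)[VOCAB.index("<s>")]   # one-hot encoding of the previously produced word
    sentence = []
    for _ in range(max_len):
        # The semantics and the previous word jointly update the hidden state,
        # which in turn constrains the possible continuations.
        h = np.tanh(W_sem @ semantics + W_in @ prev + W_rec @ h)
        probs = softmax(W_out @ h)
        idx = int(probs.argmax())
        if VOCAB[idx] == "</s>":
            break
        sentence.append(VOCAB[idx])
        prev = np.eye(V)[idx]
    return sentence

print(produce(rng.normal(size=SEM_DIM)))  # prints a toy (untrained, hence arbitrary) word sequence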
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="calvillo-crocker-2018-language">
<titleInfo>
<title>Language Production Dynamics with Recurrent Neural Networks</title>
</titleInfo>
<name type="personal">
<namePart type="given">Jesús</namePart>
<namePart type="family">Calvillo</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Matthew</namePart>
<namePart type="family">Crocker</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Eighth Workshop on Cognitive Aspects of Computational Language Learning and Processing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Marco</namePart>
<namePart type="family">Idiart</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alessandro</namePart>
<namePart type="family">Lenci</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Thierry</namePart>
<namePart type="family">Poibeau</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Aline</namePart>
<namePart type="family">Villavicencio</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Melbourne</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We present an analysis of the internal mechanism of the recurrent neural model of sentence production presented by Calvillo et al. (2016). The results show clear patterns of computation related to each layer in the network, allowing us to infer an algorithmic account, where the semantics activates the semantically related words, then each word generated at each time step activates syntactic and semantic constraints on possible continuations, while the recurrence preserves information through time. We propose that such insights could generalize to other models with similar architecture, including some used in computational linguistics for language modeling, machine translation and image caption generation.</abstract>
<identifier type="citekey">calvillo-crocker-2018-language</identifier>
<identifier type="doi">10.18653/v1/W18-2803</identifier>
<location>
<url>https://aclanthology.org/W18-2803</url>
</location>
<part>
<date>2018-07</date>
<extent unit="page">
<start>17</start>
<end>26</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Language Production Dynamics with Recurrent Neural Networks
%A Calvillo, Jesús
%A Crocker, Matthew
%Y Idiart, Marco
%Y Lenci, Alessandro
%Y Poibeau, Thierry
%Y Villavicencio, Aline
%S Proceedings of the Eighth Workshop on Cognitive Aspects of Computational Language Learning and Processing
%D 2018
%8 July
%I Association for Computational Linguistics
%C Melbourne
%F calvillo-crocker-2018-language
%X We present an analysis of the internal mechanism of the recurrent neural model of sentence production presented by Calvillo et al. (2016). The results show clear patterns of computation related to each layer in the network, allowing us to infer an algorithmic account, where the semantics activates the semantically related words, then each word generated at each time step activates syntactic and semantic constraints on possible continuations, while the recurrence preserves information through time. We propose that such insights could generalize to other models with similar architecture, including some used in computational linguistics for language modeling, machine translation and image caption generation.
%R 10.18653/v1/W18-2803
%U https://aclanthology.org/W18-2803
%U https://doi.org/10.18653/v1/W18-2803
%P 17-26
Markdown (Informal)
[Language Production Dynamics with Recurrent Neural Networks](https://aclanthology.org/W18-2803) (Calvillo & Crocker, CogACLL 2018)
ACL
Jesús Calvillo and Matthew Crocker. 2018. [Language Production Dynamics with Recurrent Neural Networks](https://aclanthology.org/W18-2803). In *Proceedings of the Eighth Workshop on Cognitive Aspects of Computational Language Learning and Processing*, pages 17–26, Melbourne. Association for Computational Linguistics.