@inproceedings{dusell-chiang-2020-learning,
    title = "Learning Context-free Languages with Nondeterministic Stack {RNN}s",
    author = "DuSell, Brian and
      Chiang, David",
    editor = "Fern{\'a}ndez, Raquel and
      Linzen, Tal",
    booktitle = "Proceedings of the 24th Conference on Computational Natural Language Learning",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.conll-1.41",
    doi = "10.18653/v1/2020.conll-1.41",
    pages = "507--519",
    abstract = "We present a differentiable stack data structure that simultaneously and tractably encodes an exponential number of stack configurations, based on Lang{'}s algorithm for simulating nondeterministic pushdown automata. We call the combination of this data structure with a recurrent neural network (RNN) controller a Nondeterministic Stack RNN. We compare our model against existing stack RNNs on various formal languages, demonstrating that our model converges more reliably to algorithmic behavior on deterministic tasks, and achieves lower cross-entropy on inherently nondeterministic tasks.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="dusell-chiang-2020-learning">
    <titleInfo>
        <title>Learning Context-free Languages with Nondeterministic Stack RNNs</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Brian</namePart>
        <namePart type="family">DuSell</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">David</namePart>
        <namePart type="family">Chiang</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2020-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
        <titleInfo>
            <title>Proceedings of the 24th Conference on Computational Natural Language Learning</title>
        </titleInfo>
        <name type="personal">
            <namePart type="given">Raquel</namePart>
            <namePart type="family">Fernández</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Tal</namePart>
            <namePart type="family">Linzen</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <originInfo>
            <publisher>Association for Computational Linguistics</publisher>
            <place>
                <placeTerm type="text">Online</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>We present a differentiable stack data structure that simultaneously and tractably encodes an exponential number of stack configurations, based on Lang’s algorithm for simulating nondeterministic pushdown automata. We call the combination of this data structure with a recurrent neural network (RNN) controller a Nondeterministic Stack RNN. We compare our model against existing stack RNNs on various formal languages, demonstrating that our model converges more reliably to algorithmic behavior on deterministic tasks, and achieves lower cross-entropy on inherently nondeterministic tasks.</abstract>
    <identifier type="citekey">dusell-chiang-2020-learning</identifier>
    <identifier type="doi">10.18653/v1/2020.conll-1.41</identifier>
    <location>
        <url>https://aclanthology.org/2020.conll-1.41</url>
    </location>
    <part>
        <date>2020-11</date>
        <extent unit="page">
            <start>507</start>
            <end>519</end>
        </extent>
    </part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Learning Context-free Languages with Nondeterministic Stack RNNs
%A DuSell, Brian
%A Chiang, David
%Y Fernández, Raquel
%Y Linzen, Tal
%S Proceedings of the 24th Conference on Computational Natural Language Learning
%D 2020
%8 November
%I Association for Computational Linguistics
%C Online
%F dusell-chiang-2020-learning
%X We present a differentiable stack data structure that simultaneously and tractably encodes an exponential number of stack configurations, based on Lang’s algorithm for simulating nondeterministic pushdown automata. We call the combination of this data structure with a recurrent neural network (RNN) controller a Nondeterministic Stack RNN. We compare our model against existing stack RNNs on various formal languages, demonstrating that our model converges more reliably to algorithmic behavior on deterministic tasks, and achieves lower cross-entropy on inherently nondeterministic tasks.
%R 10.18653/v1/2020.conll-1.41
%U https://aclanthology.org/2020.conll-1.41
%U https://doi.org/10.18653/v1/2020.conll-1.41
%P 507-519
Markdown (Informal)
[Learning Context-free Languages with Nondeterministic Stack RNNs](https://aclanthology.org/2020.conll-1.41) (DuSell & Chiang, CoNLL 2020)
ACL
Brian DuSell and David Chiang. 2020. Learning Context-free Languages with Nondeterministic Stack RNNs. In Proceedings of the 24th Conference on Computational Natural Language Learning, pages 507–519, Online. Association for Computational Linguistics.