@inproceedings{kirnap-etal-2018-tree,
title = "Tree-Stack {LSTM} in Transition Based Dependency Parsing",
author = {K{\i}rnap, {\"O}mer and
Dayan{\i}k, Erenay and
Yuret, Deniz},
editor = "Zeman, Daniel and
Haji{\v{c}}, Jan",
booktitle = "Proceedings of the {C}o{NLL} 2018 Shared Task: Multilingual Parsing from Raw Text to Universal Dependencies",
month = oct,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/K18-2012",
doi = "10.18653/v1/K18-2012",
pages = "124--132",
abstract = "We introduce tree-stack LSTM to model state of a transition based parser with recurrent neural networks. Tree-stack LSTM does not use any parse tree based or hand-crafted features, yet performs better than models with these features. We also develop a new set of embeddings from raw features to enhance the performance. There are 4 main components of this model: stack{'}s σ-LSTM, buffer{'}s β-LSTM, actions{'} LSTM and tree-RNN. All LSTMs use continuous dense feature vectors (embeddings) as an input. Tree-RNN updates these embeddings based on transitions. We show that our model improves performance with low resource languages compared with its predecessors. We participate in CoNLL 2018 UD Shared Task as the {``}KParse{''} team and ranked 16th in LAS, 15th in BLAS and BLEX metrics, of 27 participants parsing 82 test sets from 57 languages.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kirnap-etal-2018-tree">
<titleInfo>
<title>Tree-Stack LSTM in Transition Based Dependency Parsing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ömer</namePart>
<namePart type="family">Kırnap</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Erenay</namePart>
<namePart type="family">Dayanık</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Deniz</namePart>
<namePart type="family">Yuret</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the CoNLL 2018 Shared Task: Multilingual Parsing from Raw Text to Universal Dependencies</title>
</titleInfo>
<name type="personal">
<namePart type="given">Daniel</namePart>
<namePart type="family">Zeman</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jan</namePart>
<namePart type="family">Hajič</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Brussels, Belgium</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We introduce tree-stack LSTM to model state of a transition based parser with recurrent neural networks. Tree-stack LSTM does not use any parse tree based or hand-crafted features, yet performs better than models with these features. We also develop a new set of embeddings from raw features to enhance the performance. There are 4 main components of this model: stack’s σ-LSTM, buffer’s β-LSTM, actions’ LSTM and tree-RNN. All LSTMs use continuous dense feature vectors (embeddings) as an input. Tree-RNN updates these embeddings based on transitions. We show that our model improves performance with low resource languages compared with its predecessors. We participate in CoNLL 2018 UD Shared Task as the “KParse” team and ranked 16th in LAS, 15th in BLAS and BLEX metrics, of 27 participants parsing 82 test sets from 57 languages.</abstract>
<identifier type="citekey">kirnap-etal-2018-tree</identifier>
<identifier type="doi">10.18653/v1/K18-2012</identifier>
<location>
<url>https://aclanthology.org/K18-2012</url>
</location>
<part>
<date>2018-10</date>
<extent unit="page">
<start>124</start>
<end>132</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Tree-Stack LSTM in Transition Based Dependency Parsing
%A Kırnap, Ömer
%A Dayanık, Erenay
%A Yuret, Deniz
%Y Zeman, Daniel
%Y Hajič, Jan
%S Proceedings of the CoNLL 2018 Shared Task: Multilingual Parsing from Raw Text to Universal Dependencies
%D 2018
%8 October
%I Association for Computational Linguistics
%C Brussels, Belgium
%F kirnap-etal-2018-tree
%X We introduce tree-stack LSTM to model state of a transition based parser with recurrent neural networks. Tree-stack LSTM does not use any parse tree based or hand-crafted features, yet performs better than models with these features. We also develop a new set of embeddings from raw features to enhance the performance. There are 4 main components of this model: stack’s σ-LSTM, buffer’s β-LSTM, actions’ LSTM and tree-RNN. All LSTMs use continuous dense feature vectors (embeddings) as an input. Tree-RNN updates these embeddings based on transitions. We show that our model improves performance with low resource languages compared with its predecessors. We participate in CoNLL 2018 UD Shared Task as the “KParse” team and ranked 16th in LAS, 15th in BLAS and BLEX metrics, of 27 participants parsing 82 test sets from 57 languages.
%R 10.18653/v1/K18-2012
%U https://aclanthology.org/K18-2012
%U https://doi.org/10.18653/v1/K18-2012
%P 124-132
Markdown (Informal)
[Tree-Stack LSTM in Transition Based Dependency Parsing](https://aclanthology.org/K18-2012) (Kırnap et al., CoNLL 2018)
ACL
- Ömer Kırnap, Erenay Dayanık, and Deniz Yuret. 2018. Tree-Stack LSTM in Transition Based Dependency Parsing. In Proceedings of the CoNLL 2018 Shared Task: Multilingual Parsing from Raw Text to Universal Dependencies, pages 124–132, Brussels, Belgium. Association for Computational Linguistics.