@inproceedings{qian-etal-2017-syntax,
title = "Syntax Aware {LSTM} model for Semantic Role Labeling",
author = "Qian, Feng and
Sha, Lei and
Chang, Baobao and
Liu, Lu-chen and
Zhang, Ming",
editor = "Chang, Kai-Wei and
Chang, Ming-Wei and
Srikumar, Vivek and
Rush, Alexander M.",
booktitle = "Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing",
month = sep,
year = "2017",
address = "Copenhagen, Denmark",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W17-4305",
doi = "10.18653/v1/W17-4305",
pages = "27--32",
abstract = "In Semantic Role Labeling (SRL) task, the tree structured dependency relation is rich in syntax information, but it is not well handled by existing models. In this paper, we propose Syntax Aware Long Short Time Memory (SA-LSTM). The structure of SA-LSTM changes according to dependency structure of each sentence, so that SA-LSTM can model the whole tree structure of dependency relation in an architecture engineering way. Experiments demonstrate that on Chinese Proposition Bank (CPB) 1.0, SA-LSTM improves F1 by 2.06{\%} than ordinary bi-LSTM with feature engineered dependency relation information, and gives state-of-the-art F1 of 79.92{\%}. On English CoNLL 2005 dataset, SA-LSTM brings improvement (2.1{\%}) to bi-LSTM model and also brings slight improvement (0.3{\%}) when added to the state-of-the-art model.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="qian-etal-2017-syntax">
<titleInfo>
<title>Syntax Aware LSTM model for Semantic Role Labeling</title>
</titleInfo>
<name type="personal">
<namePart type="given">Feng</namePart>
<namePart type="family">Qian</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lei</namePart>
<namePart type="family">Sha</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Baobao</namePart>
<namePart type="family">Chang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lu-chen</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ming</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2017-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kai-Wei</namePart>
<namePart type="family">Chang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ming-Wei</namePart>
<namePart type="family">Chang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Vivek</namePart>
<namePart type="family">Srikumar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexander</namePart>
<namePart type="given">M</namePart>
<namePart type="family">Rush</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Copenhagen, Denmark</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>In the Semantic Role Labeling (SRL) task, the tree-structured dependency relation is rich in syntactic information, but it is not well handled by existing models. In this paper, we propose the Syntax Aware Long Short-Term Memory (SA-LSTM). The structure of SA-LSTM changes according to the dependency structure of each sentence, so that SA-LSTM can model the whole dependency tree through its architecture rather than through feature engineering. Experiments demonstrate that on Chinese Proposition Bank (CPB) 1.0, SA-LSTM improves F1 by 2.06% over an ordinary bi-LSTM with feature-engineered dependency relation information, and achieves a state-of-the-art F1 of 79.92%. On the English CoNLL 2005 dataset, SA-LSTM brings a 2.1% improvement over a bi-LSTM model and a slight improvement (0.3%) when added to the state-of-the-art model.</abstract>
<identifier type="citekey">qian-etal-2017-syntax</identifier>
<identifier type="doi">10.18653/v1/W17-4305</identifier>
<location>
<url>https://aclanthology.org/W17-4305</url>
</location>
<part>
<date>2017-09</date>
<extent unit="page">
<start>27</start>
<end>32</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Syntax Aware LSTM model for Semantic Role Labeling
%A Qian, Feng
%A Sha, Lei
%A Chang, Baobao
%A Liu, Lu-chen
%A Zhang, Ming
%Y Chang, Kai-Wei
%Y Chang, Ming-Wei
%Y Srikumar, Vivek
%Y Rush, Alexander M.
%S Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing
%D 2017
%8 September
%I Association for Computational Linguistics
%C Copenhagen, Denmark
%F qian-etal-2017-syntax
%X In the Semantic Role Labeling (SRL) task, the tree-structured dependency relation is rich in syntactic information, but it is not well handled by existing models. In this paper, we propose the Syntax Aware Long Short-Term Memory (SA-LSTM). The structure of SA-LSTM changes according to the dependency structure of each sentence, so that SA-LSTM can model the whole dependency tree through its architecture rather than through feature engineering. Experiments demonstrate that on Chinese Proposition Bank (CPB) 1.0, SA-LSTM improves F1 by 2.06% over an ordinary bi-LSTM with feature-engineered dependency relation information, and achieves a state-of-the-art F1 of 79.92%. On the English CoNLL 2005 dataset, SA-LSTM brings a 2.1% improvement over a bi-LSTM model and a slight improvement (0.3%) when added to the state-of-the-art model.
%R 10.18653/v1/W17-4305
%U https://aclanthology.org/W17-4305
%U https://doi.org/10.18653/v1/W17-4305
%P 27-32
Markdown (Informal)
[Syntax Aware LSTM model for Semantic Role Labeling](https://aclanthology.org/W17-4305) (Qian et al., 2017)
ACL
Feng Qian, Lei Sha, Baobao Chang, Lu-chen Liu, and Ming Zhang. 2017. Syntax Aware LSTM model for Semantic Role Labeling. In Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing, pages 27–32, Copenhagen, Denmark. Association for Computational Linguistics.
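
For readers skimming the abstract above, a minimal sketch of the general mechanism it describes may help: an LSTM-style cell whose recurrence follows the sentence's dependency tree rather than the linear token order. This is a hypothetical illustration in the generic child-sum tree-LSTM family, not the authors' released code; SA-LSTM's exact gating is defined in the paper, and all names here (`TreeAwareCell`, `encode_tree`, the `heads` encoding) are assumptions made for this sketch.

```python
# Hypothetical sketch (not the authors' code): an LSTM-like cell whose
# recurrence follows dependency arcs, so the computation graph changes
# with each sentence's parse, as the abstract describes.
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

class TreeAwareCell:
    """One LSTM-style cell applied bottom-up over a dependency tree."""
    def __init__(self, d_in, d_hid, seed=0):
        rng = np.random.default_rng(seed)
        # One weight set per gate: input (i), forget (f), output (o), candidate (u).
        self.W = {g: rng.normal(0, 0.1, (d_hid, d_in)) for g in "ifou"}
        self.U = {g: rng.normal(0, 0.1, (d_hid, d_hid)) for g in "ifou"}
        self.b = {g: np.zeros(d_hid) for g in "ifou"}

    def node(self, x, child_states):
        # Child-sum aggregation: sum the hidden states of dependency children.
        h_sum = sum((h for h, _ in child_states), np.zeros(self.b["i"].shape))
        i = sigmoid(self.W["i"] @ x + self.U["i"] @ h_sum + self.b["i"])
        o = sigmoid(self.W["o"] @ x + self.U["o"] @ h_sum + self.b["o"])
        u = np.tanh(self.W["u"] @ x + self.U["u"] @ h_sum + self.b["u"])
        # One forget gate per child, so each subtree is weighted separately.
        c = i * u
        for h_k, c_k in child_states:
            f_k = sigmoid(self.W["f"] @ x + self.U["f"] @ h_k + self.b["f"])
            c = c + f_k * c_k
        return np.tanh(c) * o, c

def encode_tree(cell, embeddings, heads):
    """Bottom-up pass over a dependency tree given per-token head indices
    (-1 marks the root), returning one hidden state per token."""
    n = len(heads)
    children = [[] for _ in range(n)]
    for tok, head in enumerate(heads):
        if head >= 0:
            children[head].append(tok)
    states = [None] * n

    def visit(tok):
        kids = [visit(k) for k in children[tok]]
        states[tok] = cell.node(embeddings[tok], kids)
        return states[tok]

    for tok, head in enumerate(heads):
        if head == -1:
            visit(tok)
    return [h for h, _ in states]

# Toy usage: 4 tokens, token 1 is the root, tokens 0 and 2 depend on it.
emb = np.random.default_rng(1).normal(size=(4, 8))
hidden = encode_tree(TreeAwareCell(8, 16), emb, heads=[1, -1, 1, 2])
print([h.shape for h in hidden])  # four 16-dimensional states
```

The per-token states would then feed a role classifier; the paper's reported gains come from letting this tree-shaped recurrence replace hand-engineered dependency features.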