@InProceedings{qian-EtAl:2017:StructPred,
  author    = {Qian, Feng  and  Sha, Lei  and  Chang, Baobao  and  Liu, LuChen  and  Zhang, Ming},
  title     = {Syntax Aware LSTM model for Semantic Role Labeling},
  booktitle = {Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing},
  month     = {September},
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {27--32},
  abstract  = {In the Semantic Role Labeling (SRL) task, the tree-structured dependency
	relation is rich in syntactic information, but it is not well handled by
	existing models. In this paper, we propose the Syntax Aware Long Short-Term
	Memory (SA-LSTM) model. The structure of SA-LSTM changes according to the
	dependency structure of each sentence, so that SA-LSTM can model the whole
	tree structure of the dependency relation through architecture engineering.
	Experiments demonstrate that on Chinese Proposition Bank (CPB) 1.0, SA-LSTM
	improves F1 by 2.06% over an ordinary bi-LSTM with feature-engineered
	dependency relation information, and achieves a state-of-the-art F1 of
	79.92%. On the English CoNLL 2005 dataset, SA-LSTM brings a 2.1%
	improvement over the bi-LSTM model and a slight improvement (0.3%) when
	added to the state-of-the-art model.},
  url       = {http://www.aclweb.org/anthology/W17-4305}
}

