@inproceedings{peng-EtAl:2017:EACLlong1,
  author    = {Peng, Xiaochang and Wang, Chuan and Gildea, Daniel and Xue, Nianwen},
  title     = {Addressing the Data Sparsity Issue in Neural {AMR} Parsing},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {366--375},
  abstract  = {Neural attention models have achieved great success in different NLP tasks. However, they have not fulfilled their promise on the AMR parsing task due to the data sparsity issue. In this paper, we describe a sequence-to-sequence model for AMR parsing and present different ways to tackle the data sparsity problem. We show that our methods achieve significant improvement over a baseline neural attention model and our results are also competitive against state-of-the-art systems that do not use extra linguistic resources.},
  url       = {http://www.aclweb.org/anthology/E17-1035}
}

