@InProceedings{konstas-EtAl:2017:Long,
  author    = {Konstas, Ioannis and Iyer, Srinivasan and Yatskar, Mark and Choi, Yejin and Zettlemoyer, Luke},
  title     = {Neural AMR: Sequence-to-Sequence Models for Parsing and Generation},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = {July},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {146--157},
  abstract  = {Sequence-to-sequence models have shown strong performance across a broad range
	of applications. However, their application to parsing and generating text
	using Abstract Meaning Representation (AMR) has been limited, due to the
	relatively small amount of labeled data and the non-sequential nature of
	AMR graphs.
	We present a novel training procedure that lifts this limitation using
	millions of unlabeled sentences and careful preprocessing of the AMR graphs.
	For AMR parsing, our model achieves a competitive 62.1 SMATCH, the current
	best score reported without significant use of external semantic resources.
	For AMR generation, our model establishes a new state of the art of
	33.8 BLEU.
	We present extensive ablative and qualitative analyses, including strong
	evidence that sequence-based AMR models are robust to ordering variations
	in graph-to-sequence conversions.},
  url       = {http://aclweb.org/anthology/P17-1014}
}

