@inproceedings{peng-thomson-smith:2017:Long,
  author    = {Peng, Hao and Thomson, Sam and Smith, Noah A.},
  title     = {Deep Multitask Learning for Semantic Dependency Parsing},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {2037--2048},
  abstract  = {We present a deep neural architecture that parses sentences into three semantic
	dependency graph formalisms. By using efficient, nearly arc-factored inference
	and a bidirectional-LSTM composed with a multi-layer perceptron,  our base
	system is able to significantly improve the state of the art for semantic
	dependency parsing, without using hand-engineered features or syntax. We then
	explore two multitask learning approaches---one that shares parameters across
	formalisms, and one that uses higher-order structures to predict the graphs
	jointly. We find that both approaches improve performance across formalisms on
	average, achieving a new state of the art. Our code is open-source and
	available at https://github.com/Noahs-ARK/NeurboParser.},
  url       = {http://aclweb.org/anthology/P17-1186},
}

