@inproceedings{katiyar-cardie:2017:Long,
  author    = {Katiyar, Arzoo and Cardie, Claire},
  title     = {Going Out on a Limb: Joint Extraction of Entity Mentions and Relations without Dependency Trees},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {917--928},
  doi       = {10.18653/v1/P17-1085},
  url       = {http://aclweb.org/anthology/P17-1085},
  abstract  = {We present a novel attention-based recurrent neural network for joint
    extraction of entity mentions and relations. We show that attention along with
    long short term memory (LSTM) network can extract semantic relations between
    entity mentions without having access to dependency trees.
    Experiments on Automatic Content Extraction (ACE) corpora show that our model
    significantly outperforms feature-based joint model by Li and Ji (2014). We
    also compare our model with an end-to-end tree-based LSTM model (SPTree) by
    Miwa and Bansal (2016) and show that our model performs within 1\% on entity
    mentions and 2\% on relations. Our fine-grained analysis also shows that our
    model performs significantly better on Agent-Artifact relations, while SPTree
    performs better on Physical and Part-Whole relations.},
}

