@inproceedings{meng-rumshisky-romanov:2017:EMNLP2017,
  author    = {Meng, Yuanliang  and  Rumshisky, Anna  and  Romanov, Alexey},
  title     = {Temporal Information Extraction for Question Answering Using Syntactic Dependencies in an {LSTM}-based Architecture},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {887--896},
  abstract  = {In this paper, we propose to use a set of simple, uniform in architecture
	LSTM-based models to recover different kinds of temporal relations from text.
	Using the shortest dependency path between entities as input, the same
	architecture is used to extract intra-sentence, cross-sentence, and document
	creation time relations. A ``double-checking'' technique reverses entity pairs
	in classification, boosting the recall of positive cases and reducing
	misclassifications between opposite classes. An efficient pruning algorithm
	resolves conflicts globally. Evaluated on QA-TempEval (SemEval2015 Task 5), our
	proposed technique outperforms state-of-the-art methods by a large margin. We
	also conduct intrinsic evaluation and post state-of-the-art results on
	Timebank-Dense.},
  doi       = {10.18653/v1/D17-1092},
  url       = {https://www.aclweb.org/anthology/D17-1092}
}

