@InProceedings{chen-EtAl:2017:Long3,
  author    = {Chen, Qian  and  Zhu, Xiaodan  and  Ling, Zhen-Hua  and  Wei, Si  and  Jiang, Hui  and  Inkpen, Diana},
  title     = {Enhanced LSTM for Natural Language Inference},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = {July},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {1657--1668},
  abstract  = {Reasoning and inference are central to human and artificial intelligence.
	Modeling inference in human language is very challenging. With the availability
	of large annotated data (Bowman et al., 2015), it has recently become feasible
	to train neural-network-based inference models, which have been shown to be
	very effective. In this paper, we present a new state-of-the-art result,
	achieving an accuracy of 88.6% on the Stanford Natural Language Inference Dataset.
	Unlike the previous top models that use very complicated network architectures,
	we first demonstrate that carefully designing sequential inference models based
	on chain LSTMs can outperform all previous models. Based on this, we further
	show that by explicitly considering recursive architectures in both local
	inference modeling and inference composition, we achieve additional
	improvement. In particular, incorporating syntactic parsing information
	contributes to our best result: it further improves the performance even when
	added to the already very strong model.},
  url       = {http://aclweb.org/anthology/P17-1152}
}

