@inproceedings{nie-bansal:2017:RepEval,
  author    = {Nie, Yixin and Bansal, Mohit},
  title     = {Shortcut-Stacked Sentence Encoders for Multi-Domain Inference},
  booktitle = {Proceedings of the 2nd Workshop on Evaluating Vector Space Representations for {NLP}},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {41--45},
  doi       = {10.18653/v1/W17-5308},
  url       = {https://aclanthology.org/W17-5308},
  abstract  = {We present a simple sequential sentence encoder for multi-domain natural
    language inference. Our encoder is based on stacked bidirectional LSTM-RNNs
    with shortcut connections and fine-tuning of word embeddings. The overall
    supervised model uses the above encoder to encode two input sentences into two
    vectors, and then uses a classifier over the vector combination to label the
    relationship between these two sentences as that of entailment, contradiction,
    or neutral. Our Shortcut-Stacked sentence encoders achieve strong improvements
    over existing encoders on matched and mismatched multi-domain natural language
    inference (top single-model result in the EMNLP RepEval 2017 Shared Task
    (Nangia et al., 2017)). Moreover, they achieve the new state-of-the-art
    encoding result on the original SNLI dataset (Bowman et al., 2015).},
}

