@inproceedings{conneau-EtAl:2017:EMNLP2017,
  author    = {Conneau, Alexis and Kiela, Douwe and Schwenk, Holger and Barrault, Lo{\"\i}c and Bordes, Antoine},
  title     = {Supervised Learning of Universal Sentence Representations from {Natural Language Inference} Data},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {670--680},
  doi       = {10.18653/v1/D17-1070},
  url       = {https://www.aclweb.org/anthology/D17-1070},
  abstract  = {Many modern NLP systems rely on word embeddings, previously trained in an
    unsupervised manner on large corpora, as base features. Efforts to obtain
    embeddings for larger chunks of text, such as sentences, have however not been
    so successful. Several attempts at learning unsupervised representations of
    sentences have not reached satisfactory enough performance to be widely
    adopted.
    In this paper, we show how universal sentence representations trained using the
    supervised data of the Stanford Natural Language Inference datasets can
    consistently outperform unsupervised methods like SkipThought vectors on a wide
    range of transfer tasks. Much like how computer vision uses ImageNet to obtain
    features, which can then be transferred to other tasks, our work tends to
    indicate the suitability of natural language inference for transfer learning to
    other NLP tasks. Our encoder is publicly available.},
}

