@InProceedings{zhou-EtAl:2017:Long,
  author    = {Zhou, Qingyu  and  Yang, Nan  and  Wei, Furu  and  Zhou, Ming},
  title     = {Selective Encoding for Abstractive Sentence Summarization},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = {July},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {1095--1104},
  abstract  = {We propose a selective encoding model to extend the sequence-to-sequence
               framework for abstractive sentence summarization. It consists of a sentence
               encoder, a selective gate network, and an attention-equipped decoder. The
               sentence encoder and decoder are built with recurrent neural networks. The
               selective gate network constructs a second-level sentence representation by
               controlling the information flow from encoder to decoder. The second-level
               representation is tailored to the sentence summarization task, which leads
               to better performance. We evaluate our model on the English Gigaword, DUC
               2004, and MSR abstractive sentence summarization datasets. The experimental
               results show that the proposed selective encoding model outperforms the
               state-of-the-art baseline models.},
  url       = {http://aclweb.org/anthology/P17-1101}
}
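For readers skimming this entry, a minimal sketch of the selective gate the abstract
describes: a sigmoid gate computed from each encoder hidden state and a whole-sentence
vector, applied elementwise to produce the second-level representation. This is an
illustrative PyTorch reconstruction under assumed details (a bidirectional GRU encoder,
the layer sizes, and the names SelectiveGateEncoder, W_s, U_s), not the authors'
released code. BibTeX ignores text outside entries, so this block does not affect the entry above.

import torch
import torch.nn as nn

class SelectiveGateEncoder(nn.Module):
    """Encoder plus selective gate (sketch; sizes and wiring are assumptions)."""

    def __init__(self, vocab_size, emb_dim=300, hid_dim=256):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, emb_dim)
        # First-level representation: bidirectional GRU over the input sentence.
        self.gru = nn.GRU(emb_dim, hid_dim, bidirectional=True, batch_first=True)
        # Gate parameters: W_s acts on each hidden state, U_s on the
        # whole-sentence vector s (hypothetical names mirroring the gate equation).
        self.W_s = nn.Linear(2 * hid_dim, 2 * hid_dim, bias=False)
        self.U_s = nn.Linear(2 * hid_dim, 2 * hid_dim, bias=True)

    def forward(self, tokens):                      # tokens: (batch, seq_len)
        h, _ = self.gru(self.embed(tokens))         # h: (batch, seq_len, 2*hid_dim)
        hid = h.size(-1) // 2
        # Sentence vector s: last forward state concatenated with first backward state.
        s = torch.cat([h[:, -1, :hid], h[:, 0, hid:]], dim=-1)
        # Selective gate controlling encoder-to-decoder information flow:
        # gate_i = sigmoid(W_s h_i + U_s s);  h'_i = h_i * gate_i.
        gate = torch.sigmoid(self.W_s(h) + self.U_s(s).unsqueeze(1))
        return h * gate                             # second-level representation

The gated states h'_i would then replace the raw encoder states as the attention
memory for the decoder, which is the "tailoring" step the abstract refers to.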

