@inproceedings{meng-EtAl:2017:Long,
  author    = {Meng, Rui and Zhao, Sanqiang and Han, Shuguang and He, Daqing and Brusilovsky, Peter and Chi, Yu},
  title     = {Deep Keyphrase Generation},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {582--592},
  doi       = {10.18653/v1/P17-1054},
  url       = {http://aclweb.org/anthology/P17-1054},
  abstract  = {Keyphrase provides highly-summative information that can be effectively used
	for understanding, organizing and retrieving text content. Though previous
	studies have provided many workable solutions for automated keyphrase
	extraction, they commonly divided the to-be-summarized content into multiple
	text chunks, then ranked and selected the most meaningful ones. These
	approaches could neither identify keyphrases that do not appear in the text,
	nor capture the real semantic meaning behind the text. We propose a generative
	model for keyphrase prediction with an encoder-decoder framework, which can
	effectively overcome the above drawbacks.  We name it as \textit{deep keyphrase
	generation} since it attempts to capture the deep semantic meaning of the
	content with a deep learning method. Empirical analysis on six datasets
	demonstrates that our proposed model not only achieves a significant
	performance boost on extracting keyphrases that appear in the source text, but
	also can generate absent keyphrases based on the semantic meaning of the text.
	Code and dataset are available at https://github.com/memray/seq2seq-keyphrase.},
}

