@inproceedings{cao-EtAl:2016:COLING1,
  author    = {Cao, Ziqiang and Li, Wenjie and Li, Sujian and Wei, Furu and Li, Yanran},
  title     = {{AttSum}: Joint Learning of Focusing and Summarization with Neural Attention},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {547--556},
  abstract  = {Query relevance ranking and sentence saliency ranking are the two main tasks in
    extractive query-focused summarization. Previous supervised summarization
    systems often perform the two tasks in isolation. However, since reference
    summaries are the trade-off between relevance and saliency, using them as
    supervision, neither of the two rankers could be trained well. This paper
    proposes a novel summarization system called AttSum, which tackles the two
    tasks jointly. It automatically learns distributed representations for
    sentences as well as the document cluster. Meanwhile, it applies the attention
    mechanism to simulate the attentive reading of human behavior when a query is
    given. Extensive experiments are conducted on DUC query-focused summarization
    benchmark datasets. Without using any hand-crafted features, AttSum achieves
    competitive performance. We also observe that the sentences recognized to focus
    on the query indeed meet the query need.},
  url       = {http://aclweb.org/anthology/C16-1053},
}

