@inproceedings{wang-EtAl:2017:EMNLP20171,
  author    = {Wang, Kexiang and Liu, Tianyu and Sui, Zhifang and Chang, Baobao},
  title     = {Affinity-Preserving Random Walk for Multi-Document Summarization},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {210--220},
  abstract  = {Multi-document summarization provides users with a short text that summarizes
    the information in a set of related documents. This paper introduces
    affinity-preserving random walk to the summarization task, which preserves the
    affinity relations of sentences by an absorbing random walk model. Meanwhile,
    we put forward adjustable affinity-preserving random walk to enforce the
    diversity constraint of summarization in the random walk process. The ROUGE
    evaluations on DUC 2003 topic-focused summarization task and DUC 2004 generic
    summarization task show the good performance of our method, which has the best
    ROUGE-2 recall among the graph-based ranking methods.},
  url       = {https://www.aclweb.org/anthology/D17-1020},
}

