@InProceedings{cohan-EtAl:2018:N18-2,
  author    = {Cohan, Arman  and  Dernoncourt, Franck  and  Kim, Doo Soon  and  Bui, Trung  and  Kim, Seokhwan  and  Chang, Walter  and  Goharian, Nazli},
  title     = {A Discourse-Aware Attention Model for Abstractive Summarization of Long Documents},
  booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 2 (Short Papers)},
  month     = jun,
  year      = {2018},
  address   = {New Orleans, Louisiana},
  publisher = {Association for Computational Linguistics},
  pages     = {615--621},
  abstract  = {Neural abstractive summarization models have led to promising results in summarizing relatively short documents. We propose the first model for abstractive summarization of single, longer-form documents (e.g., research papers). Our approach consists of a new hierarchical encoder that models the discourse structure of a document, and an attentive discourse-aware decoder to generate the summary. Empirical results on two large-scale datasets of scientific papers show that our model significantly outperforms state-of-the-art models.},
  doi       = {10.18653/v1/N18-2097},
  url       = {http://www.aclweb.org/anthology/N18-2097}
}

