@inproceedings{liu-lapata:2017:EMNLP2017,
  author    = {Liu, Yang and Lapata, Mirella},
  title     = {Learning Contextually Informed Representations for Linear-Time Discourse Parsing},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1289--1298},
  abstract  = {Recent advances in RST discourse parsing have focused on two modeling
    paradigms: (a) high order parsers which jointly predict the tree structure of
    the discourse and the relations it encodes; or (b) linear-time parsers which
    are efficient but mostly based on local features. In this work, we propose a
    linear-time parser with a novel way of representing discourse constituents
    based on neural networks which takes into account global contextual information
    and is able to capture long-distance dependencies. Experimental results show
    that our parser obtains state-of-the-art performance on benchmark datasets,
    while being efficient (with time complexity linear in the number of sentences
    in the document) and requiring minimal feature engineering.},
  url       = {https://www.aclweb.org/anthology/D17-1133},
}

