@inproceedings{wang-zhang-chang:2017:EMNLP2017,
  author    = {Wang, Zhongqing and Zhang, Yue and Chang, Ching-Yun},
  title     = {Integrating Order Information and Event Relation for Script Event Prediction},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {57--67},
  abstract  = {There has been a recent line of work automatically learning scripts from
               unstructured texts, by modeling narrative event chains. While the dominant
               approach group events using event pair relations, LSTMs have been used to
               encode full chains of narrative events. The latter has the advantage of
               learning long-range temporal orders, yet the former is more adaptive to partial
               orders. We propose a neural model that leverages the advantages of both
               methods, by using LSTM hidden states as features for event pair modelling. A
               dynamic memory network is utilized to automatically induce weights on existing
               events for inferring a subsequent event. Standard evaluation shows that our
               method significantly outperforms both methods above, giving the best results
               reported so far.},
  url       = {https://www.aclweb.org/anthology/D17-1006},
}

