@inproceedings{lau-baldwin-cohn:2017:Long,
  author    = {Lau, Jey Han and Baldwin, Timothy and Cohn, Trevor},
  title     = {Topically Driven Neural Language Model},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {355--365},
  abstract  = {Language models are typically applied at the sentence level, without
    access to the broader document context. We present a neural language
    model that incorporates document context in the form of a topic
    model-like architecture, thus providing a succinct representation of the
    broader document context outside of the current sentence. Experiments
    over a range of datasets demonstrate that our model outperforms a pure
    sentence-based model in terms of language model perplexity, and leads
    to topics that are potentially more coherent than those produced by a
    standard LDA topic model. Our model also has the ability to generate
    related sentences for a topic, providing another way to interpret topics.},
  doi       = {10.18653/v1/P17-1033},
  url       = {https://aclanthology.org/P17-1033},
}

