@inproceedings{yang-EtAl:2017:EMNLP20173,
  author    = {Yang, Zichao and Blunsom, Phil and Dyer, Chris and Ling, Wang},
  title     = {Reference-Aware Language Models},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1850--1859},
  abstract  = {We propose a general class of language models that treat reference as discrete
    stochastic latent variables. This decision allows for the creation of entity
    mentions by accessing external databases of referents (required by, e.g.,
    dialogue generation) or past internal state (required to explicitly model
    coreferentiality). Beyond simple copying, our coreference model can
    additionally refer to a referent using varied mention forms (e.g., a reference
    to ``Jane'' can be realized as ``she''), a characteristic feature of
    reference in natural languages. Experiments on three representative
    applications show our model variants outperform models based on deterministic
    attention and standard language modeling baselines.},
  doi       = {10.18653/v1/D17-1197},
  url       = {https://www.aclweb.org/anthology/D17-1197},
}

