@InProceedings{ji-EtAl:2017:EMNLP2017,
  author    = {Ji, Yangfeng  and  Tan, Chenhao  and  Martschat, Sebastian  and  Choi, Yejin  and  Smith, Noah A.},
  title     = {Dynamic Entity Representations in Neural Language Models},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1830--1839},
  abstract  = {Understanding a long document requires tracking how entities are introduced and
	evolve over time. We present a new type of language model, EntityNLM, that can
	explicitly model entities, dynamically update their representations, and
	contextually generate their mentions. Our model is generative and flexible; it
	can model an arbitrary number of entities in context while generating each
	entity mention at an arbitrary length. In addition, it can be used for several
	different tasks such as language modeling, coreference resolution, and entity
	prediction. Experimental results with all these tasks demonstrate that our
	model consistently outperforms strong baselines and prior work.},
  doi       = {10.18653/v1/D17-1195},
  url       = {https://www.aclweb.org/anthology/D17-1195}
}

