@InProceedings{liu-baldwin-cohn:2017:I17-1,
  author    = {Liu, Fei  and  Baldwin, Timothy  and  Cohn, Trevor},
  title     = {Capturing Long-range Contextual Dependencies with Memory-enhanced Conditional Random Fields},
  booktitle = {Proceedings of the Eighth International Joint Conference on Natural Language Processing (Volume 1: Long Papers)},
  month     = {November},
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {555--565},
  abstract  = {Despite successful applications across a broad range of NLP
               tasks, conditional random fields (``CRFs''), in particular the
               linear-chain variant, are only able to model local features.
               While this has important benefits in terms of inference
               tractability, it limits the ability of the model to capture
               long-range dependencies between items. Attempts to extend CRFs
               to capture long-range dependencies have largely come at the
               cost of computational complexity and approximate inference. In
               this work, we propose an extension to CRFs by integrating
               external memory, taking inspiration from memory networks,
               thereby allowing CRFs to incorporate information far beyond
               neighbouring steps. Experiments across two tasks show
               substantial improvements over strong CRF and LSTM baselines.},
  url       = {http://www.aclweb.org/anthology/I17-1056}
}

