@InProceedings{adel-schutze:2017:EACLlong,
  author    = {Adel, Heike  and  Sch\"{u}tze, Hinrich},
  title     = {Exploring Different Dimensions of Attention for Uncertainty Detection},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = {April},
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {22--34},
  abstract  = {Neural networks with attention have proven effective for many natural language
	processing tasks. In this paper, we develop attention mechanisms for
	uncertainty detection. In particular, we generalize standardly used attention
	mechanisms by introducing external attention and sequence-preserving attention.
	These novel architectures differ from standard approaches in that they use
	external resources to compute attention weights and preserve sequence
	information. We compare them to other configurations along different dimensions
	of attention. Our novel architectures set the new state of the art on a
	Wikipedia benchmark dataset and perform similarly to the state-of-the-art model
	on a biomedical benchmark that uses a large set of linguistic features.},
  url       = {http://www.aclweb.org/anthology/E17-1003}
}

