@inproceedings{brarda-yeres-bowman:2017:RepL4NLP,
  author    = {Brarda, Sebastian and Yeres, Philip and Bowman, Samuel},
  title     = {Sequential Attention: A Context-Aware Alignment Function for Machine Reading},
  booktitle = {Proceedings of the 2nd Workshop on Representation Learning for {NLP}},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {75--80},
  abstract  = {In this paper we propose a neural network model with a novel Sequential
	Attention layer that extends soft attention by assigning weights to words in an
	input sequence in a way that takes into account not just how well that word
	matches a query, but how well surrounding words match. We evaluate this
	approach on the task of reading comprehension (on the Who did What and CNN
	datasets) and show that it dramatically improves a strong baseline---the
	Stanford Reader---and is competitive with the state of the art.},
  doi       = {10.18653/v1/W17-2610},
  url       = {http://www.aclweb.org/anthology/W17-2610}
}

