@InProceedings{cui-EtAl:2017:Long,
  author    = {Cui, Yiming and Chen, Zhipeng and Wei, Si and Wang, Shijin and Liu, Ting and Hu, Guoping},
  title     = {Attention-over-Attention Neural Networks for Reading Comprehension},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = {July},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {593--602},
  abstract  = {Cloze-style reading comprehension is a representative problem in mining
               the relationship between a document and a query. In this paper, we
               present a simple but novel model called the attention-over-attention
               reader for better solving the cloze-style reading comprehension task.
               The proposed model places another attention mechanism over the
               document-level attention, inducing ``attended attention'' for final
               answer prediction. One advantage of our model is that it is simpler
               than related works while delivering excellent performance. In addition
               to the primary model, we also propose an N-best re-ranking strategy to
               double-check the validity of candidate answers and further improve
               performance. Experimental results show that the proposed methods
               significantly outperform various state-of-the-art systems on public
               datasets such as CNN and the Children's Book Test.},
  url       = {http://aclweb.org/anthology/P17-1055}
}