@InProceedings{yang-EtAl:2017:EACLshort1,
  author    = {Yang, Zichao  and  Hu, Zhiting  and  Deng, Yuntian  and  Dyer, Chris  and  Smola, Alex},
  title     = {Neural Machine Translation with Recurrent Attention Modeling},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 2, Short Papers},
  month     = {April},
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {383--387},
  abstract  = {Knowing which words have been attended to in previous time steps while
               generating a translation is a rich source of information for predicting what
               words will be attended to in the future. We improve upon the attention model of
               Bahdanau et al. (2014) by explicitly modeling the relationship between previous
               and subsequent attention levels for each word using one recurrent network per
               input word. This architecture easily captures informative features, such as
               fertility and regularities in relative distortion. In experiments, we show our
               parameterization of attention improves translation quality.},
  url       = {http://www.aclweb.org/anthology/E17-2061}
}
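
The abstract describes tracking each source word's attention history with a per-word recurrent network whose state feeds back into future attention scores. Below is a minimal PyTorch sketch of that idea; the class name, shapes, and the exact score function (a Bahdanau-style additive score extended with a per-word memory term) are illustrative assumptions, not the paper's exact parameterization.

    # Sketch of per-source-word recurrent attention, loosely following the
    # idea in the abstract above. Names and the score function are assumed
    # for illustration, not taken from the paper.
    import torch
    import torch.nn as nn

    class RecurrentAttention(nn.Module):
        def __init__(self, dec_dim, enc_dim, attn_dim, mem_dim):
            super().__init__()
            # One GRU (shared weights) keeps a separate hidden state per
            # source word, fed that word's attention weight at each step.
            self.history = nn.GRUCell(input_size=1, hidden_size=mem_dim)
            self.w_dec = nn.Linear(dec_dim, attn_dim, bias=False)
            self.w_enc = nn.Linear(enc_dim, attn_dim, bias=False)
            self.w_mem = nn.Linear(mem_dim, attn_dim, bias=False)
            self.v = nn.Linear(attn_dim, 1, bias=False)

        def forward(self, dec_state, enc_states, mem):
            # dec_state: (B, dec_dim); enc_states: (B, S, enc_dim);
            # mem: (B, S, mem_dim), the per-word attention-history states.
            B, S, _ = enc_states.shape
            scores = self.v(torch.tanh(
                self.w_dec(dec_state).unsqueeze(1)   # (B, 1, attn_dim)
                + self.w_enc(enc_states)             # (B, S, attn_dim)
                + self.w_mem(mem)                    # (B, S, attn_dim)
            )).squeeze(-1)                           # (B, S)
            alpha = torch.softmax(scores, dim=-1)
            context = torch.bmm(alpha.unsqueeze(1), enc_states).squeeze(1)
            # Update each word's history state with its new attention weight.
            new_mem = self.history(
                alpha.reshape(B * S, 1), mem.reshape(B * S, -1)
            ).reshape(B, S, -1)
            return context, alpha, new_mem

In this sketch, `mem` would start as zeros of shape (B, S, mem_dim) and be threaded through decoder steps, so signals like cumulative coverage (relevant to the fertility and distortion features the abstract mentions) can be learned from the attention history rather than hand-designed.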

