@inproceedings{zhang-EtAl:2017:Long3,
  author    = {Zhang, Jinchao and Wang, Mingxuan and Liu, Qun and Zhou, Jie},
  title     = {Incorporating Word Reordering Knowledge into Attention-based Neural Machine Translation},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {1524--1534},
  abstract  = {This paper proposes three distortion models to explicitly incorporate the word
               reordering knowledge into attention-based Neural Machine Translation (NMT) for
               further improving translation performance. Our proposed models enable attention
               mechanism to attend to source words regarding both the semantic requirement and
               the word reordering penalty. Experiments on Chinese-English translation show
               that the approaches can improve word alignment quality and achieve significant
               translation improvements over a basic attention-based NMT by large margins.
               Compared with previous works on identical corpora, our system achieves the
               state-of-the-art performance on translation quality.},
  doi       = {10.18653/v1/P17-1140},
  url       = {https://aclanthology.org/P17-1140},
}

