@InProceedings{meng-EtAl:2016:COLING,
  author    = {Meng, Fandong  and  Lu, Zhengdong  and  Li, Hang  and  Liu, Qun},
  title     = {Interactive Attention for Neural Machine Translation},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {2174--2185},
  abstract  = {Conventional attention-based Neural Machine Translation (NMT) conducts dynamic
	alignment in generating the target sentence. By repeatedly reading the
	representation of the source sentence, which is kept fixed after being
	generated by the encoder (Bahdanau et al., 2015), the attention mechanism
	has greatly enhanced state-of-the-art NMT. In this paper, we propose a new
	attention mechanism, called INTERACTIVE ATTENTION, which models the
	interaction between the decoder and the representation of the source
	sentence during translation through both reading and writing operations.
	INTERACTIVE ATTENTION can keep track of the interaction history and
	therefore improve translation performance. Experiments on the NIST
	Chinese-English translation task show that INTERACTIVE ATTENTION achieves
	significant improvements over both the previous attention-based NMT
	baseline and state-of-the-art variants of attention-based NMT (i.e.,
	coverage models (Tu et al., 2016)). Moreover, a neural machine translator
	with our INTERACTIVE ATTENTION outperforms the open-source attention-based
	NMT system Groundhog by 4.22 BLEU points and the open-source phrase-based
	system Moses by 3.94 BLEU points on average across multiple test sets.},
  url       = {http://aclweb.org/anthology/C16-1205}
}

