@inproceedings{feng-EtAl:2017:EMNLP2017,
  author    = {Feng, Yang  and  Zhang, Shiyue  and  Zhang, Andi  and  Wang, Dong  and  Abel, Andrew},
  title     = {Memory-augmented Neural Machine Translation},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1390--1399},
  abstract  = {Neural machine translation (NMT) has achieved notable success in recent times,
	however it is also widely recognized that this approach has limitations with
	handling infrequent words and word pairs. This paper presents a novel
	memory-augmented NMT (M-NMT) architecture, which stores knowledge about how
	words (usually infrequently encountered ones) should be translated in a memory
	and then utilizes them to assist the neural model. We use this memory mechanism
	to combine the knowledge learned from a conventional statistical machine
	translation system and the rules learned by an NMT system, and also propose a
	solution for out-of-vocabulary (OOV) words based on this framework. Our
	experiments on two Chinese-English translation tasks demonstrated that the
	M-NMT architecture outperformed the NMT baseline by $9.0$ and $2.7$ BLEU points
	on the two tasks, respectively. Additionally, we found this architecture
	resulted in a much more effective OOV treatment compared to competitive
	methods.},
  doi       = {10.18653/v1/D17-1146},
  url       = {https://www.aclweb.org/anthology/D17-1146},
}

