@article{TACL1081,
  author   = {Johnson, Melvin and Schuster, Mike and Le, Quoc and Krikun, Maxim and Wu, Yonghui and Chen, Zhifeng and Thorat, Nikhil and Vi{\'e}gas, Fernanda and Wattenberg, Martin and Corrado, Greg and Hughes, Macduff and Dean, Jeffrey},
  title    = {{Google}'s Multilingual Neural Machine Translation System: Enabling Zero-Shot Translation},
  journal  = {Transactions of the Association for Computational Linguistics},
  volume   = {5},
  year     = {2017},
  pages    = {339--351},
  issn     = {2307-387X},
  doi      = {10.1162/tacl_a_00065},
  url      = {https://transacl.org/ojs/index.php/tacl/article/view/1081},
  abstract = {We propose a simple solution to use a single Neural Machine Translation (NMT) model to translate between multiple languages. Our solution requires no changes to the model architecture from a standard NMT system but instead introduces an artificial token at the beginning of the input sentence to specify the required target language. Using a shared wordpiece vocabulary, our approach enables Multilingual NMT using a single model. On the {WMT}'14 benchmarks, a single multilingual model achieves comparable performance for English$\rightarrow$French and surpasses state-of-the-art results for English$\rightarrow$German. Similarly, a single multilingual model surpasses state-of-the-art results for French$\rightarrow$English and German$\rightarrow$English on {WMT}'14 and {WMT}'15 benchmarks, respectively. On production corpora, multilingual models of up to twelve language pairs allow for better translation of many individual pairs. Our models can also learn to perform implicit bridging between language pairs never seen explicitly during training, showing that transfer learning and zero-shot translation is possible for neural translation. Finally, we show analyses that hints at a universal interlingua representation in our models and show some interesting examples when mixing languages.},
}