@InProceedings{chen-EtAl:2017:Long5,
  author    = {Chen, Yun  and  Liu, Yang  and  Cheng, Yong  and  Li, Victor O. K.},
  title     = {A Teacher-Student Framework for Zero-Resource Neural Machine Translation},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {1925--1935},
  abstract  = {While end-to-end neural machine translation (NMT) has made remarkable progress
    recently, it still suffers from the data scarcity problem for low-resource
    language pairs and domains. In this paper, we propose a method for
    zero-resource NMT by assuming that parallel sentences have close probabilities
    of generating a sentence in a third language. Based on the assumption, our
    method is able to train a source-to-target NMT model (``student'') without
    parallel corpora available guided by an existing pivot-to-target NMT model
    (``teacher'') on a source-pivot parallel corpus. Experimental results show that
    the proposed method significantly improves over a baseline pivot-based model by
    +3.0 BLEU points across various language pairs.},
  doi       = {10.18653/v1/P17-1176},
  url       = {http://aclweb.org/anthology/P17-1176}
}

