@inproceedings{wang-EtAl:2017:Short3,
  author    = {Wang, Rui and Finch, Andrew and Utiyama, Masao and Sumita, Eiichiro},
  title     = {Sentence Embedding for {Neural Machine Translation} Domain Adaptation},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {560--566},
  abstract  = {Although new corpora are becoming increasingly available for machine
    translation, only those that belong to the same or similar domains are
    typically able to improve translation performance. Recently Neural Machine
    Translation (NMT) has become prominent in the field. However, most of the
    existing domain adaptation methods only focus on phrase-based machine
    translation. In this paper, we exploit the NMT's internal embedding of the
    source sentence and use the sentence embedding similarity to select the
    sentences which are close to in-domain data. The empirical adaptation results
    on the IWSLT English-French and NIST Chinese-English tasks show that the
    proposed methods can substantially improve NMT performance by 2.4--9.0 BLEU
    points, outperforming the existing state-of-the-art baseline by 2.3--4.5 BLEU
    points.},
  doi       = {10.18653/v1/P17-2089},
  url       = {http://aclweb.org/anthology/P17-2089},
}

