@InProceedings{niehues-EtAl:2016:COLING,
  author    = {Niehues, Jan  and  Cho, Eunah  and  Ha, Thanh-Le  and  Waibel, Alex},
  title     = {Pre-Translation for Neural Machine Translation},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {1828--1836},
  abstract  = {Recently, the development of neural machine translation (NMT) has significantly
	improved the quality of automatic translation. While most sentences are more
	accurate and fluent than translations produced by systems based on statistical
	machine translation (SMT), in some cases the NMT system produces translations
	with a completely different meaning. This is especially the case when rare
	words occur.
	For statistical machine translation, it has already been shown that
	significant gains can be achieved by simplifying the input in a preprocessing
	step. A commonly used example is the pre-reordering approach.
	In this work, we use phrase-based machine translation to pre-translate the
	input into the target language. A neural machine translation system then
	generates the final hypothesis from the pre-translation, using either only the
	output of the phrase-based machine translation (PBMT) system or a combination
	of the PBMT output and the source sentence.
	We evaluate the technique on the English-to-German translation task. Using
	this approach, we outperform both the PBMT system and the baseline neural MT
	system by up to 2 BLEU points. We also analyze the influence of the quality of
	the initial system on the final result.},
  url       = {http://aclweb.org/anthology/C16-1172}
}

