@inproceedings{hirschmann-nam-furnkranz:2016:COLING,
  author    = {Hirschmann, Fabian and Nam, Jinseok and F{\"u}rnkranz, Johannes},
  title     = {What Makes Word-level Neural Machine Translation Hard: A Case Study on {English-German} Translation},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {3199--3208},
  abstract  = {Traditional machine translation systems often require heavy feature engineering
	and the combination of multiple techniques for solving different subproblems.
	In recent years, several end-to-end learning architectures based on recurrent
	neural networks have been proposed. Unlike traditional systems, Neural Machine
	Translation (NMT) systems learn the parameters of the model and require only
	minimal preprocessing. Memory and time constraints allow to take only a fixed
	number of words into account, which leads to the out-of-vocabulary (OOV)
	problem. In this work, we analyze why the OOV problem arises and why it is
	considered a serious problem in German. We study the effectiveness of compound
	word splitters for alleviating the OOV problem, resulting in a 2.5+ BLEU points
	improvement over a baseline on the WMT'14 German-to-English translation task.
	For English-to-German translation, we use target-side compound splitting
	through a special syntax during training that allows the model to merge
	compound words and gain 0.2 BLEU points.},
  url       = {http://aclweb.org/anthology/C16-1301},
}

