@inproceedings{kocmi-varivs-bojar:2017:WAT2017,
  author    = {Kocmi, Tom and Vari{\v{s}}, Du{\v{s}}an and Bojar, Ond{\v{r}}ej},
  title     = {{CUNI} {NMT} System for {WAT} 2017 Translation Tasks},
  booktitle = {Proceedings of the 4th Workshop on Asian Translation (WAT2017)},
  month     = nov,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {154--159},
  abstract  = {The paper presents this year's CUNI submissions to the WAT 2017
	Translation Task focusing on the Japanese-English translation, namely
	Scientific papers subtask, Patents subtask and Newswire subtask. We
	compare two neural network architectures, the standard
	sequence-to-sequence with attention (Seq2Seq) and an architecture using
	convolutional sentence encoder (FBConv2Seq), both implemented in the NMT
	framework Neural Monkey that we currently participate in developing.
	We also compare various types of preprocessing of the source Japanese
	sentences and their impact on the overall results. Furthermore, we
	include the results of our experiments with out-of-domain data obtained
	by combining the corpora provided for each subtask.},
  url       = {http://www.aclweb.org/anthology/W17-5715},
}

