@InProceedings{libovicky-helcl:2017:Short,
  author    = {Libovick{\'y}, Jind{\v{r}}ich and Helcl, Jind{\v{r}}ich},
  title     = {Attention Strategies for Multi-Source Sequence-to-Sequence Learning},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {196--202},
  abstract  = {Modeling attention in neural multi-source sequence-to-sequence learning remains
	a relatively unexplored area, despite its usefulness in tasks that incorporate
	multiple source languages or modalities.
	We propose two novel approaches to combine the outputs of attention mechanisms
	over each source sequence, flat and hierarchical.
	We compare the proposed methods with existing techniques and present results of
	systematic evaluation of those methods on the WMT16 Multimodal Translation and
	Automatic Post-editing tasks.
	We show that the proposed methods achieve competitive results on both tasks.},
  doi       = {10.18653/v1/P17-2031},
  url       = {https://aclanthology.org/P17-2031},
}

