@inproceedings{lakew-cettolo-federico:2018:C18-1,
  author    = {Lakew, Surafel Melaku and Cettolo, Mauro and Federico, Marcello},
  title     = {A Comparison of {Transformer} and Recurrent Neural Networks on Multilingual Neural Machine Translation},
  booktitle = {Proceedings of the 27th International Conference on Computational Linguistics},
  month     = aug,
  year      = {2018},
  address   = {Santa Fe, New Mexico, USA},
  publisher = {Association for Computational Linguistics},
  pages     = {641--652},
  abstract  = {Recently, neural machine translation (NMT) has been extended to multilinguality, that is to handle more than one translation direction with a single system. Multilingual NMT showed competitive performance against pure bilingual systems. Notably, in low-resource settings, it proved to work effectively and efficiently, thanks to shared representation space that is forced across languages and induces a sort of transfer-learning. Furthermore, multilingual NMT enables so-called zero-shot inference across language pairs never seen at training time. Despite the increasing interest},
  internal-note = {abstract is truncated mid-sentence in the source export; restore from the ACL Anthology page if the full text is needed},
  url       = {https://aclanthology.org/C18-1054},
}

