@inproceedings{raganato-tiedemann:2018:BlackboxNLP,
  author    = {Raganato, Alessandro and Tiedemann, J{\"o}rg},
  title     = {An Analysis of Encoder Representations in {Transformer}-Based Machine Translation},
  booktitle = {Proceedings of the 2018 {EMNLP} Workshop {BlackboxNLP}: Analyzing and Interpreting Neural Networks for {NLP}},
  month     = nov,
  year      = {2018},
  address   = {Brussels, Belgium},
  publisher = {Association for Computational Linguistics},
  pages     = {287--297},
  doi       = {10.18653/v1/W18-5431},
  abstract  = {The attention mechanism is a successful technique in modern NLP, especially in tasks like machine translation. The recently proposed network architecture of the Transformer is based entirely on attention mechanisms and achieves new state of the art results in neural machine translation, outperforming other sequence-to-sequence models.},
  url       = {http://www.aclweb.org/anthology/W18-5431},
}

