@inproceedings{kuchaiev-EtAl:2018:NLP-OSS,
  author    = {Kuchaiev, Oleksii and Ginsburg, Boris and Gitman, Igor and Lavrukhin, Vitaly and Case, Carl and Micikevicius, Paulius},
  title     = {{OpenSeq2Seq}: Extensible Toolkit for Distributed and Mixed Precision Training of Sequence-to-Sequence Models},
  booktitle = {Proceedings of Workshop for {NLP} Open Source Software ({NLP-OSS})},
  month     = jul,
  year      = {2018},
  address   = {Melbourne, Australia},
  publisher = {Association for Computational Linguistics},
  pages     = {41--46},
  abstract  = {We present OpenSeq2Seq -- an open-source toolkit for training sequence-to-sequence models. The main goal of our toolkit is to allow researchers to most effectively explore different sequence-to-sequence architectures. The efficiency is achieved by fully supporting distributed and mixed-precision training. OpenSeq2Seq provides building blocks for training encoder-decoder models for neural machine translation and automatic speech recognition. We plan to extend it with other modalities in the future.},
  url       = {http://www.aclweb.org/anthology/W18-2507},
}

