@InProceedings{semeniuta-severyn-barth:2016:COLING,
  author    = {Semeniuta, Stanislau and Severyn, Aliaksei and Barth, Erhardt},
  title     = {Recurrent Dropout without Memory Loss},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {1757--1766},
  abstract  = {This paper presents a novel approach to recurrent neural network (RNN)
	regularization. Unlike the widely adopted dropout method, which is applied
	to the forward connections of feed-forward architectures or RNNs, we
	propose to drop neurons directly in the recurrent connections, in a way
	that does not cause loss of long-term memory. Our approach is as easy to
	implement and apply as regular feed-forward dropout, and we demonstrate
	its effectiveness on the most widely used modern recurrent network, the
	Long Short-Term Memory (LSTM) network. Our experiments on three NLP
	benchmarks show consistent improvements, even when combined with
	conventional feed-forward dropout.},
  url       = {http://aclweb.org/anthology/C16-1165}
}
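The note below is a minimal NumPy sketch of the scheme the abstract describes, not the authors' released implementation; the gate names, weight shapes, and dropout rate p are illustrative assumptions. The key point is that per-step dropout is applied only to the candidate update g_t, so the forget-gate path carrying the cell state c_{t-1} forward is never zeroed out and long-term memory is preserved. (BibTeX ignores text outside entries, so this comment does not affect parsing.)

    import numpy as np

    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    def lstm_step_recurrent_dropout(x, h_prev, c_prev, W, U, b,
                                    p=0.25, train=True, rng=None):
        # One LSTM step with recurrent dropout on the candidate update g_t
        # only, i.e. c_t = f_t * c_{t-1} + i_t * d(g_t): the additive
        # forget-gate path to c_{t-1} is untouched, so the cell state
        # (long-term memory) is never dropped.
        # Assumed shapes: x (..., D), h_prev/c_prev (..., H),
        # W (D, 4H), U (H, 4H), b (4H,).
        H = h_prev.shape[-1]
        z = x @ W + h_prev @ U + b          # all four gates in one affine map
        i = sigmoid(z[..., 0 * H:1 * H])    # input gate
        f = sigmoid(z[..., 1 * H:2 * H])    # forget gate
        o = sigmoid(z[..., 2 * H:3 * H])    # output gate
        g = np.tanh(z[..., 3 * H:4 * H])    # candidate cell update
        if train:
            # Inverted dropout mask on g_t only, scaled so no rescaling
            # is needed at test time.
            rng = rng if rng is not None else np.random.default_rng()
            mask = rng.binomial(1, 1.0 - p, size=g.shape) / (1.0 - p)
            g = g * mask
        c = f * c_prev + i * g              # dropout never touches f * c_prev
        h = o * np.tanh(c)
        return h, c

This contrasts with naively dropping h_t or c_t in the recurrence, which can repeatedly zero the memory cell across time steps and erase long-range information.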

