@inproceedings{amiri-miller-savova:2017:EMNLP2017,
  author    = {Amiri, Hadi  and  Miller, Timothy  and  Savova, Guergana},
  title     = {Repeat before Forgetting: Spaced Repetition for Efficient and Effective Training of Neural Networks},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {2401--2410},
  abstract  = {We present a novel approach for training artificial neural networks. Our
	approach is inspired by broad evidence in psychology that shows human learners
	can learn efficiently and effectively by increasing intervals of time between
	subsequent reviews of previously learned materials (spaced repetition). We
	investigate the analogy between training neural models and findings in
	psychology about human memory model and develop an efficient and effective
	algorithm to train neural models. The core part of our algorithm is a
	cognitively-motivated scheduler according to which training instances and their
	``reviews'' are spaced over time. Our algorithm uses only 34--50\% of data per
	epoch, is 2.9--4.8 times faster than standard training, and outperforms
	competing state-of-the-art baselines. Our code is available at
	scholar.harvard.edu/hadi/RbF/.},
  url       = {https://www.aclweb.org/anthology/D17-1255}
}

