@inproceedings{zhang-EtAl:2017:I17-2,
  author    = {Zhang, Dakun and Kim, Jungi and Crego, Josep and Senellart, Jean},
  title     = {Boosting {Neural Machine Translation}},
  booktitle = {Proceedings of the Eighth International Joint Conference on Natural Language Processing (Volume 2: Short Papers)},
  month     = nov,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {271--276},
  abstract  = {Training efficiency is one of the main problems for Neural Machine Translation
               (NMT). Deep networks need for very large data as well as many training
               iterations to achieve state-of-the-art performance. This results in very high
               computation cost, slowing down research and industrialisation. In this paper,
               we propose to alleviate this problem with several training methods based on
               data boosting and bootstrap with no modifications to the neural network.
               It imitates the learning process of humans, which typically spend more time
               when learning ``difficult'' concepts than easier ones. We experiment on an
               English-French translation task showing accuracy improvements of up to 1.63
               BLEU while saving 20\% of training time.},
  url       = {http://www.aclweb.org/anthology/I17-2046},
}

