@InProceedings{kunze-EtAl:2017:RepL4NLP,
  author    = {Kunze, Julius and Kirsch, Louis and Kurenkov, Ilia and Krug, Andreas and Johannsmeier, Jens and Stober, Sebastian},
  title     = {Transfer Learning for Speech Recognition on a Budget},
  booktitle = {Proceedings of the 2nd Workshop on Representation Learning for NLP},
  month     = {August},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {168--177},
  abstract  = {End-to-end training of automated speech recognition (ASR) systems requires
               massive data and compute resources. We explore transfer learning based on model
               adaptation as an approach for training ASR models under constrained GPU memory,
               throughput, and training data. We conduct several systematic experiments
               adapting a Wav2Letter convolutional neural network originally trained for
               English ASR to the German language. We show that this technique allows faster
               training on consumer-grade resources while requiring less training data to
               achieve the same accuracy, thereby lowering the cost of training ASR
               models in other languages. Model introspection revealed that small adaptations
               to the network's weights were sufficient for good performance, especially for
               inner layers.},
  url       = {http://www.aclweb.org/anthology/W17-2620}
}

