@inproceedings{kann-schutze:2017:SCLeM,
  author    = {Kann, Katharina and Sch{\"u}tze, Hinrich},
  title     = {Unlabeled Data for Morphological Generation With Character-Based Sequence-to-Sequence Models},
  booktitle = {Proceedings of the First Workshop on Subword and Character Level Models in {NLP}},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {76--81},
  abstract  = {We present a semi-supervised way of training a character-based encoder-decoder
               recurrent neural network for morphological reinflection---the task of
               generating one inflected wordform from another. This is achieved by using
               unlabeled tokens or random strings as training data for an autoencoding task,
               adapting a network for morphological reinflection, and performing multi-task
               training.
               We thus use limited labeled data more effectively, obtaining up to 9.92\%
               improvement over state-of-the-art baselines for 8 different languages.},
  url       = {http://www.aclweb.org/anthology/W17-4111},
}

