@inproceedings{assylbekov-EtAl:2017:EMNLP2017,
  author    = {Assylbekov, Zhenisbek and Takhanov, Rustem and Myrzakhmetov, Bagdat and Washington, Jonathan N.},
  title     = {Syllable-aware Neural Language Models: A Failure to Beat Character-aware Ones},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1866--1872},
  doi       = {10.18653/v1/D17-1199},
  url       = {https://www.aclweb.org/anthology/D17-1199},
  abstract  = {Syllabification does not seem to improve word-level RNN language modeling
  quality when compared to character-based segmentation. However, our best
  syllable-aware language model, achieving performance comparable to the
  competitive character-aware model, has 18\%--33\% fewer parameters and is trained
  1.2--2.2 times faster.},
}

