@inproceedings{verwimp-EtAl:2017:EACLlong,
  author    = {Verwimp, Lyan and Pelemans, Joris and Van hamme, Hugo and Wambacq, Patrick},
  title     = {Character-Word {LSTM} Language Models},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {417--427},
  abstract  = {We present a Character-Word Long Short-Term Memory Language Model which both
	reduces the perplexity with respect to a baseline word-level language model and
	reduces the number of parameters of the model. Character information can reveal
	structural (dis)similarities between words and can even be used when a word is
	out-of-vocabulary, thus improving the modeling of infrequent and unknown words.
	By concatenating word and character embeddings, we achieve up to 2.77\% relative
	improvement on English compared to a baseline model with a similar amount of
	parameters and 4.57\% on Dutch. Moreover, we also outperform baseline word-level
	models with a larger number of parameters.},
  url       = {http://www.aclweb.org/anthology/E17-1040},
}

