@InProceedings{godin-dambre-deneve:2017:RepL4NLP,
  author    = {Godin, Fr\'{e}deric and Dambre, Joni and De Neve, Wesley},
  title     = {Improving Language Modeling using Densely Connected Recurrent Neural Networks},
  booktitle = {Proceedings of the 2nd Workshop on Representation Learning for NLP},
  month     = {August},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {186--190},
  abstract  = {In this paper, we introduce the novel concept of densely connected
               layers into recurrent neural networks. We evaluate our proposed
               architecture on the Penn Treebank language modeling task. We show
               that we can obtain similar perplexity scores with six times fewer
               parameters compared to a standard stacked 2-layer LSTM model
               trained with dropout (Zaremba et al., 2014). In contrast with the
               current usage of skip connections, we show that densely connecting
               only a few stacked layers with skip connections already yields
               significant perplexity reductions.},
  url       = {http://www.aclweb.org/anthology/W17-2622}
}
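
The abstract describes DenseNet-style connectivity applied to stacked recurrent
layers: each layer, and the final decoder, receives the concatenation of the
input embedding and the outputs of all preceding layers. As a rough sketch only
(this is not the authors' code; the class name, layer sizes, and dropout
placement are illustrative assumptions), such a densely connected LSTM language
model might look like this in PyTorch:

import torch
import torch.nn as nn

class DenselyConnectedLSTM(nn.Module):
    """Language model with DenseNet-style skip connections between LSTM layers."""

    def __init__(self, vocab_size, emb_size=200, hidden_size=200,
                 num_layers=2, dropout=0.5):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, emb_size)
        self.drop = nn.Dropout(dropout)
        self.layers = nn.ModuleList()
        in_size = emb_size
        for _ in range(num_layers):
            self.layers.append(nn.LSTM(in_size, hidden_size, batch_first=True))
            in_size += hidden_size  # later layers also see this layer's output
        # The decoder sees the embedding plus every layer's output, concatenated.
        self.decoder = nn.Linear(in_size, vocab_size)

    def forward(self, tokens):                       # tokens: (batch, seq)
        x = self.drop(self.embed(tokens))            # (batch, seq, emb_size)
        features = [x]
        for lstm in self.layers:
            out, _ = lstm(torch.cat(features, dim=-1))
            features.append(self.drop(out))
        return self.decoder(torch.cat(features, dim=-1))  # (batch, seq, vocab)

Because every layer reuses all earlier representations, the per-layer hidden
size can presumably be kept small, which is one way the abstract's claim of six
times fewer parameters than a standard stacked 2-layer LSTM becomes plausible.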

