@inproceedings{moirangthem-son-lee:2017:RepL4NLP,
  author    = {Moirangthem, Dennis Singh  and  Son, Jegyung  and  Lee, Minho},
  title     = {Representing Compositionality based on Multiple Timescales {Gated Recurrent Neural Networks} with Adaptive Temporal Hierarchy for Character-Level Language Models},
  booktitle = {Proceedings of the 2nd Workshop on Representation Learning for NLP},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {131--138},
  doi       = {10.18653/v1/W17-2616},
  abstract  = {A novel character-level neural language model is proposed in this paper. The
	proposed model incorporates a biologically inspired temporal hierarchy in the
	architecture for representing multiple compositions of language in order to
	handle longer sequences for the character-level language model. The temporal
	hierarchy is introduced in the language model by utilizing a Gated Recurrent
	Neural Network with multiple timescales. The proposed model incorporates a
	timescale adaptation mechanism for enhancing the performance of the language
	model. We evaluate our proposed model using the popular Penn Treebank and Text8
	corpora. The experiments show that the use of multiple timescales in a Neural
	Language Model (NLM) enables improved performance despite having fewer
	parameters and with no additional computation requirements. Our experiments
	also demonstrate the ability of the adaptive temporal hierarchies to represent
	multiple compositionality without the help of complex hierarchical architectures
	and show that better representation of the longer sequences leads to enhanced
	performance of the probabilistic language model.},
  url       = {https://aclanthology.org/W17-2616},
}

