@inproceedings{munkhdalai-yu:2017:EACLlong2,
  author    = {Munkhdalai, Tsendsuren and Yu, Hong},
  title     = {Neural Semantic Encoders},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {397--407},
  abstract  = {We present a memory augmented neural network for natural language understanding: Neural Semantic Encoders. NSE is equipped with a novel memory update rule and has a variable sized encoding memory that evolves over time and maintains the understanding of input sequences through read, compose and write operations. NSE can also access multiple and shared memories. In this paper, we demonstrated the effectiveness and the flexibility of NSE on five different natural language tasks: natural language inference, question answering, sentence classification, document sentiment analysis and machine translation where NSE achieved state-of-the-art performance when evaluated on publically available benchmarks. For example, our shared-memory model showed an encouraging result on neural machine translation, improving an attention-based baseline by approximately 1.0 BLEU.},
  url       = {https://aclanthology.org/E17-1038},
}

