@InProceedings{nisioi-EtAl:2017:Short,
  author    = {Nisioi, Sergiu and {\v{S}}tajner, Sanja and Ponzetto, Simone Paolo and Dinu, Liviu P.},
  title     = {Exploring Neural Text Simplification Models},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {85--91},
  abstract  = {We present the first attempt at using sequence to sequence neural networks to model text simplification (TS). Unlike the previously proposed automated TS systems, our neural text simplification (NTS) systems are able to simultaneously perform lexical simplification and content reduction. An extensive human evaluation of the output has shown that NTS systems achieve almost perfect grammaticality and meaning preservation of output sentences and higher level of simplification than the state-of-the-art automated TS systems.},
  url       = {http://aclweb.org/anthology/P17-2014},
}

