@InProceedings{prakash-EtAl:2016:COLING,
  author    = {Prakash, Aaditya  and  Hasan, Sadid A.  and  Lee, Kathy  and  Datla, Vivek  and  Qadir, Ashequl  and  Liu, Joey  and  Farri, Oladimeji},
  title     = {Neural Paraphrase Generation with Stacked Residual LSTM Networks},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {2923--2934},
  abstract  = {In this paper, we propose a novel neural approach for paraphrase generation.
	Conventional paraphrase generation methods either leverage hand-written rules
	and thesauri-based alignments, or use statistical machine learning principles.
	To the best of our knowledge, this work is the first to explore deep learning
	models for paraphrase generation. Our primary contribution is a stacked
	residual LSTM network, where we add residual connections between LSTM layers.
	This allows for efficient training of deep LSTMs. We evaluate our model and
	other state-of-the-art deep learning models on three different datasets: PPDB,
	WikiAnswers, and MSCOCO. Evaluation results demonstrate that our model
	outperforms sequence-to-sequence, attention-based, and bi-directional LSTM
	models on BLEU, METEOR, TER, and an embedding-based sentence similarity metric.},
  url       = {http://aclweb.org/anthology/C16-1275}
}
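
The abstract sketches the paper's core idea: residual (shortcut) connections added between stacked LSTM layers to ease training of deep recurrent networks. Below is a minimal, hypothetical PyTorch sketch of that idea, kept here as an informal inter-entry comment (BibTeX ignores text between entries). It is not the authors' released code; the names StackedResidualLSTM, hidden_size, and num_layers are illustrative assumptions, and for simplicity it adds a residual connection after every layer, whereas the paper's exact placement of the connections may differ.

import torch
import torch.nn as nn

class StackedResidualLSTM(nn.Module):
    """Stack of single-layer LSTMs with residual connections between layers."""

    def __init__(self, hidden_size: int, num_layers: int = 4):
        super().__init__()
        # One single-layer LSTM per level, so residual additions can be
        # inserted between levels by hand.
        self.layers = nn.ModuleList(
            [nn.LSTM(hidden_size, hidden_size, batch_first=True)
             for _ in range(num_layers)]
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, seq_len, hidden_size)
        for lstm in self.layers:
            out, _ = lstm(x)
            x = x + out  # residual connection between LSTM layers
        return x

if __name__ == "__main__":
    model = StackedResidualLSTM(hidden_size=64, num_layers=4)
    demo = torch.randn(2, 10, 64)   # (batch, seq_len, hidden_size)
    print(model(demo).shape)        # -> torch.Size([2, 10, 64])

Because each level has matching input and output widths, the element-wise sum is well defined at every layer; this is the property that lets gradients bypass individual LSTM layers during backpropagation.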

