@inproceedings{mou-EtAl:2016:COLING,
  author    = {Mou, Lili and Song, Yiping and Yan, Rui and Li, Ge and Zhang, Lu and Jin, Zhi},
  title     = {Sequence to Backward and Forward Sequences: A Content-Introducing Approach to Generative Short-Text Conversation},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {3349--3358},
  abstract  = {Using neural networks to generate replies in human-computer dialogue systems is
    attracting increasing attention over the past few years. However, the
    performance is not satisfactory: the neural network tends to generate safe,
    universally relevant replies which carry little meaning. In this paper, we
    propose a content-introducing approach to neural network-based generative
    dialogue systems. We first use pointwise mutual information (PMI) to predict a
    noun as a keyword, reflecting the main gist of the reply. We then propose
    seq2BF, a ``sequence to backward and forward sequences'' model, which generates a
    reply containing the given keyword. Experimental results show that our approach
    significantly outperforms traditional sequence-to-sequence models in terms of
    human evaluation and the entropy measure, and that the predicted keyword can
    appear at an appropriate position in the reply.},
  url       = {http://aclweb.org/anthology/C16-1316},
}

