% Wang, Che & Liu (COLING 2016) — disfluency detection via neural attention.
% ACL Anthology ID C16-1027; abstract reproduced verbatim from the Anthology.
@inproceedings{wang-che-liu:2016:COLING,
  author    = {Wang, Shaolei and Che, Wanxiang and Liu, Ting},
  title     = {A Neural Attention Model for Disfluency Detection},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The {COLING} 2016 Organizing Committee},
  pages     = {278--287},
  abstract  = {In this paper, we study the problem of disfluency detection using the
	encoder-decoder framework. We treat disfluency detection as a
	sequence-to-sequence problem and propose a neural attention-based model which
	can efficiently model the long-range dependencies between words and make the
	resulting sentence more likely to be grammatically correct. Our model firstly
	encode the source sentence with a bidirectional Long Short-Term Memory
	(BI-LSTM) and then use the neural attention as a pointer to select an ordered
	sub sequence of the input as the output. Experiments show that our model
	achieves the state-of-the-art f-score of  86.7\% on the commonly used English
	Switchboard test set. We also evaluate the performance of our model on the
	in-house annotated Chinese data and achieve a significantly higher f-score
	compared to the baseline of CRF-based approach.},
  url       = {http://aclweb.org/anthology/C16-1027},
}

