@inproceedings{ji-EtAl:2017:Long,
  author    = {Ji, Jianshu  and  Wang, Qinlong  and  Toutanova, Kristina  and  Gong, Yongen  and  Truong, Steven  and  Gao, Jianfeng},
  title     = {A Nested Attention Neural Hybrid Model for Grammatical Error Correction},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {753--762},
  abstract  = {Grammatical error correction (GEC) systems strive to correct both global errors
	in word order and usage, and local errors in spelling and inflection. Further
	developing upon recent work on neural machine translation, we propose a new
	hybrid neural model with nested attention layers for GEC. Experiments show that
	the new model can effectively correct errors of both types by incorporating
	word and character-level information, and that the model significantly
	outperforms previous neural models for GEC as measured on the standard
	CoNLL-14 benchmark dataset. Further analysis also shows that the superiority of
	the proposed model can be largely attributed to the use of the nested attention
	mechanism, which has proven particularly effective in correcting local errors
	that involve small edits in orthography.},
  url       = {http://aclweb.org/anthology/P17-1070}
}

