@inproceedings{yuan-EtAl:2017:I17-4,
  author    = {Yuan, Hang and Zhang, You and Wang, Jin and Zhang, Xuejie},
  title     = {{YNU-HPCC} at {IJCNLP}-2017 Task 5: Multi-choice Question Answering in Exams Using an Attention-based {LSTM} Model},
  booktitle = {Proceedings of the {IJCNLP} 2017, Shared Tasks},
  month     = dec,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {208--212},
  abstract  = {A shared task is a typical question answering task that aims to test how
	accurately the participants can answer the questions in exams. Typically, for
	each question, there are four candidate answers, and only one of the answers is
	correct. The existing methods for such a task usually implement a recurrent
	neural network (RNN) or long short-term memory (LSTM). However, both RNN and
	LSTM are biased models in which the words in the tail of a sentence are more
	dominant than the words in the header. In this paper, we propose the use of an
	attention-based LSTM (AT-LSTM) model for these tasks. By adding an attention
	mechanism to the standard LSTM, this model can more easily capture long
	contextual information.},
  url       = {https://aclanthology.org/I17-4035},
}

