@inproceedings{romeo-EtAl:2016:COLING,
  author    = {Romeo, Salvatore  and  Da San Martino, Giovanni  and  Barr{\'o}n-Cede{\~n}o, Alberto  and  Moschitti, Alessandro  and  Belinkov, Yonatan  and  Hsu, Wei-Ning  and  Zhang, Yu  and  Mohtarami, Mitra  and  Glass, James},
  title     = {Neural Attention for Learning to Rank Questions in Community Question Answering},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {1734--1745},
  abstract  = {In real-world data, e.g., from Web forums, text is often contaminated with
    redundant or irrelevant content, which leads to introducing noise in machine
    learning algorithms.
    In this paper, we apply Long Short-Term Memory networks with an attention
    mechanism, which can select important parts of text for the task of similar
    question retrieval from community Question Answering (cQA) forums.
    In particular, we use the attention weights for both selecting entire sentences
    and their subparts, i.e., word/chunk, from shallow syntactic trees. More
    interestingly, we apply tree kernels to the filtered text representations, thus
    exploiting the implicit features of the subtree space for learning question
    reranking. Our results show that the attention-based pruning allows for
    achieving the top position in the cQA challenge of SemEval 2016, with a
    relatively large gap from the other participants while greatly decreasing
    running time.},
  url       = {http://aclweb.org/anthology/C16-1163},
}

