@inproceedings{nicosia-moschitti:2017:CoNLL,
  author    = {Nicosia, Massimo and Moschitti, Alessandro},
  title     = {Learning Contextual Embeddings for Structural Semantic Similarity using Categorical Information},
  booktitle = {Proceedings of the 21st Conference on Computational Natural Language Learning ({CoNLL} 2017)},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {260--270},
  abstract  = {Tree kernels (TKs) and neural networks are two effective approaches for
    automatic feature engineering. In this paper, we combine them by modeling
    context word similarity in semantic TKs. This way, the latter can operate
    subtree matching by applying neural-based similarity on tree lexical nodes. We
    study how to learn representations for the words in context such that TKs can
    exploit more focused information. We found that neural embeddings produced by
    current methods do not provide a suitable contextual similarity. Thus, we
    define a new approach based on a Siamese Network, which produces word
    representations while learning a binary text similarity. We set the latter
    considering examples in the same category as similar. The experiments on
    question and sentiment classification show that our semantic TK highly improves
    previous results.},
  url       = {http://aclweb.org/anthology/K17-1027},
}

