@inproceedings{gabor-EtAl:2017:EMNLP2017,
  author    = {G{\'a}bor, Kata and Zargayouna, Haifa and Tellier, Isabelle and Buscaldi, Davide and Charnois, Thierry},
  title     = {Exploring Vector Spaces for Semantic Relations},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1814--1823},
  doi       = {10.18653/v1/D17-1193},
  url       = {https://www.aclweb.org/anthology/D17-1193},
  abstract  = {Word embeddings are used with success for a variety of tasks involving lexical
	semantic similarities between individual words. Using unsupervised methods and
	just cosine similarity, encouraging results were obtained for analogical
	similarities. In this paper, we explore the potential of pre-trained word
	embeddings to identify generic types of semantic relations in an unsupervised
	experiment. We propose a new relational similarity measure based on the
	combination of word2vec's CBOW input and output vectors which outperforms
	concurrent vector representations, when used for unsupervised clustering on
	SemEval 2010 Relation Classification data.},
}

