@inproceedings{jameel-schockaert:2017:CoNLL,
  author    = {Jameel, Shoaib and Schockaert, Steven},
  title     = {Modeling Context Words as Regions: An Ordinal Regression Approach to Word Embedding},
  booktitle = {Proceedings of the 21st Conference on Computational Natural Language Learning ({CoNLL} 2017)},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {123--133},
  doi       = {10.18653/v1/K17-1014},
  url       = {http://aclweb.org/anthology/K17-1014},
  abstract  = {Vector representations of word meaning have found many applications in the
    field of natural language processing. Word vectors intuitively represent the
    average context in which a given word tends to occur, but they cannot
    explicitly model the diversity of these contexts. Although region
    representations of word meaning offer a natural alternative to word vectors,
    only few methods have been proposed that can effectively learn word regions. In
    this paper, we propose a new word embedding model which is based on SVM
    regression. We show that the underlying ranking interpretation of word contexts
    is sufficient to match, and sometimes outperform, the performance of popular
    methods such as Skip-gram. Furthermore, we show that by using a quadratic
    kernel, we can effectively learn word regions, which outperform existing
    unsupervised models for the task of hypernym detection.},
}

