@inproceedings{torabiasr-zinkov-jones:2018:N18-1,
  author    = {Torabi Asr, Fatemeh and Zinkov, Robert and Jones, Michael},
  title     = {Querying Word Embeddings for Similarity and Relatedness},
  booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long Papers)},
  month     = jun,
  year      = {2018},
  address   = {New Orleans, Louisiana},
  publisher = {Association for Computational Linguistics},
  pages     = {675--684},
  abstract  = {Word embeddings obtained from neural network models such as Word2Vec Skipgram have become popular representations of word meaning and have been evaluated on a variety of word similarity and relatedness norming data. Skipgram generates a set of word and context embeddings, the latter typically discarded after training. We demonstrate the usefulness of context embeddings in predicting asymmetric association between words from a recently published dataset of production norms (Jouravlev \& McRae, 2016). Our findings suggest that humans respond with words closer to the cue within the context embedding space (rather than the word embedding space), when asked to generate thematically related words.},
  url       = {http://www.aclweb.org/anthology/N18-1062},
}

