@inproceedings{lucy-gauthier:2017:RoboNLP,
  author    = {Lucy, Li and Gauthier, Jon},
  title     = {Are Distributional Representations Ready for the Real World? Evaluating Word Vectors for Grounded Perceptual Meaning},
  booktitle = {Proceedings of the First Workshop on Language Grounding for Robotics},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {76--85},
  doi       = {10.18653/v1/W17-2810},
  url       = {http://www.aclweb.org/anthology/W17-2810},
  abstract  = {Distributional word representation methods exploit word co-occurrences to build compact vector encodings of words. While these representations enjoy widespread use in modern natural language processing, it is unclear whether they accurately encode all necessary facets of conceptual meaning. In this paper, we evaluate how well these representations can predict perceptual and conceptual features of concrete concepts, drawing on two semantic norm datasets sourced from human participants. We find that several standard word representations fail to encode many salient perceptual features of concepts, and show that these deficits correlate with word-word similarity prediction errors. Our analyses provide motivation for grounded and embodied language learning approaches, which may help to remedy these deficits.},
}

