@InProceedings{hu-zhang-zheng:2016:COLING,
  author    = {Hu, Wenpeng  and  Zhang, Jiajun  and  Zheng, Nan},
  title     = {Different Contexts Lead to Different Word Embeddings},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {762--771},
  abstract  = {Recent work on learning word representations has been applied successfully to many
	NLP applications, such as sentiment analysis and question answering. However,
	most of these models assume a single vector per word type without considering
	polysemy and homonymy. In this paper, we present an extension to the CBOW model
	that not only improves the quality of the embeddings but also makes them
	suitable for polysemy. It differs from most related work in that it
	learns one semantic center embedding and one context bias per word type instead of
	training multiple embeddings per word type. A different context leads to a different
	bias, which is defined as the weighted average of the local context embeddings.
	Experimental results on the similarity and analogy tasks show that the word
	representations learned by the proposed method outperform competitive
	baselines.},
  url       = {http://aclweb.org/anthology/C16-1073}
}

