@inproceedings{cao-EtAl:2017:I17-1,
  author    = {Cao, Yixin  and  Shi, Jiaxin  and  Li, Juanzi  and  Liu, Zhiyuan  and  Li, Chengjiang},
  title     = {On Modeling Sense Relatedness in Multi-prototype Word Embedding},
  booktitle = {Proceedings of the Eighth International Joint Conference on Natural Language Processing (Volume 1: Long Papers)},
  month     = nov,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {233--242},
  abstract  = {To enhance the expression ability of distributional word representation
	learning model, many researchers tend to induce word senses through clustering,
	and learn multiple embedding vectors for each word, namely multi-prototype word
	embedding model. However, most related work ignores the relatedness among word
	senses which actually plays an important role. In this paper, we propose a
	novel approach to capture word sense relatedness in multi-prototype word
	embedding model. Particularly, we differentiate the original sense and extended
	senses of a word by introducing their global occurrence information and model
	their relatedness through the local textual context information. Based on the
	idea of fuzzy clustering, we introduce a random process to integrate these two
	types of senses and design two non-parametric methods for word sense induction.
	To make our model more scalable and efficient, we use an online joint learning
	framework extended from the Skip-gram model. The experimental results
	demonstrate that our model outperforms both conventional single-prototype
	embedding models and other multi-prototype embedding models, and achieves more
	stable performance when trained on smaller data.},
  url       = {http://www.aclweb.org/anthology/I17-1024},
}

