@inproceedings{wang-ma:2017:EACLshort,
  author    = {Wang, Hsin-Yang and Ma, Wei-Yun},
  title     = {Integrating Semantic Knowledge into Lexical Embeddings Based on Information Content Measurement},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 2, Short Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {509--515},
  abstract  = {Distributional word representations are widely used in NLP tasks. These
	representations are based on an assumption that words with a similar context
	tend to have a similar meaning. To improve the quality of the context-based
	embeddings, many researches have explored how to make full use of existing
	lexical resources. In this paper, we argue that while we incorporate the prior
	knowledge with context-based embeddings, words with different occurrences
	should be treated differently. Therefore, we propose to rely on the measurement
	of information content to control the degree of applying prior knowledge into
	context-based embeddings - different words would have different learning rates
	when adjusting their embeddings. In the result, we demonstrate that our
	embeddings get significant improvements on two different tasks: Word Similarity
	and Analogical Reasoning.},
  url       = {http://www.aclweb.org/anthology/E17-2082},
}

