@InProceedings{yaghoobzadeh-schutze:2017:EACLlong,
  author    = {Yaghoobzadeh, Yadollah and Sch\"{u}tze, Hinrich},
  title     = {Multi-level Representations for Fine-Grained Typing of Knowledge Base Entities},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = {April},
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {578--589},
  abstract  = {Entities are essential elements of natural language. In this paper, we present
               methods for learning multi-level representations of entities on three
               complementary levels: character (character patterns in entity names extracted,
               e.g., by neural networks), word (embeddings of words in entity names) and
               entity (entity embeddings). We investigate state-of-the-art learning methods on
               each level and find large differences, e.g., for deep learning models,
               traditional ngram features and the subword model of fasttext (Bojanowski et
               al., 2016) on the character level; for word2vec (Mikolov et al., 2013) on the
               word level; and for the order-aware model wang2vec (Ling et al., 2015a) on the
               entity level. We confirm experimentally that each level of representation
               contributes complementary information and a joint representation of all three
               levels improves the existing embedding based baseline for fine-grained entity
               typing by a large margin. Additionally, we show that adding information from
               entity descriptions further improves multi-level representations of entities.},
  url       = {http://www.aclweb.org/anthology/E17-1055}
}