@InProceedings{dasigi-EtAl:2017:Long,
  author    = {Dasigi, Pradeep and Ammar, Waleed and Dyer, Chris and Hovy, Eduard},
  title     = {Ontology-Aware Token Embeddings for Prepositional Phrase Attachment},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = {July},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {2089--2098},
  abstract  = {Type-level word embeddings use the same set of parameters to represent all
               instances of a word regardless of its context, ignoring the inherent lexical
               ambiguity in language. Instead, we embed semantic concepts (or synsets) as
               defined in WordNet and represent a word token in a particular context by
               estimating a distribution over relevant semantic concepts. We use the new,
               context-sensitive embeddings in a model for predicting prepositional phrase
               (PP) attachments and jointly learn the concept embeddings and model parameters.
               We show that using context-sensitive embeddings improves the accuracy of the PP
               attachment model by 5.4% absolute points, which amounts to a 34.4% relative
               reduction in errors.},
  url       = {http://aclweb.org/anthology/P17-1191}
}
