@inproceedings{dehouck-denis:2017:EACLlong,
  author    = {Dehouck, Mathieu and Denis, Pascal},
  title     = {Delexicalized Word Embeddings for Cross-lingual Dependency Parsing},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {241--250},
  abstract  = {This paper presents a new approach to the problem of
                cross-lingual dependency parsing, aiming at leveraging
                training data from different source languages to learn a
                parser in a target language. Specifically, this approach
                first constructs word vector representations that exploit
                structural (i.e., dependency-based) contexts but only
                considering the morpho-syntactic information associated
                with each word and its contexts. These delexicalized word
                embeddings, which can be trained on any set of languages
                and capture features shared across languages, are then
                used in combination with standard language-specific
                features to train a lexicalized parser in the target
                language. We evaluate our approach through experiments on
                a set of eight different languages that are part of the
                Universal Dependencies Project. Our main results show
                that using such delexicalized embeddings, either trained
                in a monolingual or multilingual fashion, achieves
                significant improvements over monolingual baselines.},
  url       = {http://www.aclweb.org/anthology/E17-1023}
}

