@inproceedings{shi-EtAl:2017:K17-3,
  author    = {Shi, Tianze and Wu, Felix G. and Chen, Xilun and Cheng, Yao},
  title     = {Combining Global Models for Parsing {Universal Dependencies}},
  booktitle = {Proceedings of the {CoNLL} 2017 Shared Task: Multilingual Parsing from Raw Text to {Universal Dependencies}},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {31--39},
  doi       = {10.18653/v1/K17-3003},
  url       = {http://www.aclweb.org/anthology/K17-3003},
  abstract  = {We describe our entry, C2L2, to the CoNLL 2017 shared task on parsing Universal
    Dependencies from raw text. Our system features an ensemble of three global
    parsing paradigms, one graph-based and two transition-based. Each model
    leverages character-level bi-directional LSTMs as lexical feature extractors to
    encode morphological information. Though relying on baseline tokenizers and
    focusing only on parsing, our system ranked second in the official end-to-end
    evaluation with a macro-average of 75.00 LAS F1 score over 81 test treebanks.
    In addition, we had the top average performance on the four surprise languages
    and on the small treebank subset.},
}

