@inproceedings{zhang-sun-wan:2017:CoNLL,
  author    = {Zhang, Xun and Sun, Weiwei and Wan, Xiaojun},
  title     = {The Covert Helps Parse the Overt},
  booktitle = {Proceedings of the 21st Conference on Computational Natural Language Learning ({CoNLL} 2017)},
  month     = aug,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {343--353},
  doi       = {10.18653/v1/K17-1035},
  url       = {https://aclanthology.org/K17-1035},
  abstract  = {This paper is concerned with whether deep syntactic information can help surface parsing, with a particular focus on empty categories. We design new algorithms to produce dependency trees in which empty elements are allowed, and evaluate the impact of information about empty category on parsing overt elements. Such information is helpful to reduce the approximation error in a structured parsing model, but increases the search space for inference and accordingly the estimation error. To deal with structure-based overfitting, we propose to integrate disambiguation models with and without empty elements, and perform structure regularization via joint decoding. Experiments on English and Chinese TreeBanks with different parsing models indicate that incorporating empty elements consistently improves surface parsing.},
}

