@inproceedings{do-bethard-moens:2016:COLING,
  author    = {Do, Quynh Ngoc Thi and Bethard, Steven and Moens, Marie-Francine},
  title     = {Facing the most difficult case of {Semantic Role Labeling}: A collaboration of word embeddings and co-training},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {1275--1284},
  abstract  = {We present a successful collaboration of word embeddings and co-training to
	tackle in the most difficult test case of semantic role labeling: predicting
	out-of-domain and unseen semantic frames. Despite the fact that co-training is
	a successful traditional semi-supervised method, its application in SRL is very
	limited especially when a huge amount of labeled data is available. In this
	work, co-training is used together with word embeddings to improve the
	performance of a system trained on a large training dataset. We also introduce
	a semantic role labeling system with a simple learning architecture and
	effective inference that is easily adaptable to semi-supervised settings with
	new training data and/or new features. On the out-of-domain testing set of the
	standard benchmark CoNLL 2009 data our simple approach achieves high
	performance and improves state-of-the-art results.},
  url       = {http://aclweb.org/anthology/C16-1121}
}

