@inproceedings{luo-EtAl:2017:Long,
  author    = {Luo, Bingfeng  and  Feng, Yansong  and  Wang, Zheng  and  Zhu, Zhanxing  and  Huang, Songfang  and  Yan, Rui  and  Zhao, Dongyan},
  title     = {Learning with Noise: Enhance Distantly Supervised Relation Extraction with Dynamic Transition Matrix},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {430--439},
  abstract  = {Distant supervision significantly reduces human efforts in building training
	data for many classification tasks. While promising, this technique often
	introduces noise to the generated training data, which can severely affect the
	model performance. In this paper, we take a deep look at the application of
	distant supervision in relation extraction. We show that the dynamic transition
	matrix can effectively characterize the noise in the training data built by
	distant supervision. The transition matrix can be effectively trained using a
	novel curriculum learning based method without any direct supervision about the
	noise. We thoroughly evaluate our approach under a wide range of extraction
	scenarios. Experimental results show that our approach consistently improves
	the extraction results and outperforms the state-of-the-art in various
	evaluation scenarios.},
  doi       = {10.18653/v1/P17-1040},
  url       = {http://aclweb.org/anthology/P17-1040}
}

