@InProceedings{le-titov:2017:CoNLL,
  author    = {Le, Phong and Titov, Ivan},
  title     = {Optimizing Differentiable Relaxations of Coreference Evaluation Metrics},
  booktitle = {Proceedings of the 21st Conference on Computational Natural Language Learning (CoNLL 2017)},
  month     = {August},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {390--399},
  abstract  = {Coreference evaluation metrics are hard to optimize directly as they are
               non-differentiable functions, not easily decomposable into elementary
               decisions. Consequently, most approaches optimize objectives only indirectly
               related to the end goal, resulting in suboptimal performance. Instead, we
               propose a differentiable relaxation that lends itself to gradient-based
               optimization, thus bypassing the need for reinforcement learning or heuristic
               modification of cross-entropy. We show that by modifying the training objective
               of a competitive neural coreference system, we obtain a substantial gain in
               performance. This suggests that our approach can be regarded as a viable
               alternative to using reinforcement learning or more computationally expensive
               imitation learning.},
  url       = {http://aclweb.org/anthology/K17-1039}
}

