@inproceedings{scholman-demberg:2017:LAW,
  author    = {Scholman, Merel and Demberg, Vera},
  title     = {Crowdsourcing discourse interpretations: On the influence of context and the reliability of a connective insertion task},
  booktitle = {Proceedings of the 11th Linguistic Annotation Workshop},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {24--33},
  abstract  = {Traditional discourse annotation tasks are considered costly and
    time-consuming, and the reliability and validity of these tasks is in question.
    In this paper, we investigate whether crowdsourcing can be used to obtain
    reliable discourse relation annotations. We also examine the influence of
    context on the reliability of the data. The results of a crowdsourced
    connective insertion task showed that the method can be used to obtain reliable
    annotations: The majority of the inserted connectives converged with the
    original label. Further, the method is sensitive to the fact that multiple
    senses can often be inferred for a single relation. Regarding the presence of
    context, the results show no significant difference in distributions of
    insertions between conditions overall. However, a by-item comparison revealed
    several characteristics of segments that determine whether the presence of
    context makes a difference in annotations. The findings discussed in this paper
    can be taken as evidence that crowdsourcing can be used as a valuable method to
    obtain insights into the sense(s) of relations.},
  url       = {http://www.aclweb.org/anthology/W17-0803},
}

