@inproceedings{wities-EtAl:2017:LSDSem,
  author    = {Wities, Rachel  and  Shwartz, Vered  and  Stanovsky, Gabriel  and  Adler, Meni  and  Shapira, Ori  and  Upadhyay, Shyam  and  Roth, Dan  and  Mart{\'\i}nez-C{\'a}mara, Eugenio  and  Gurevych, Iryna  and  Dagan, Ido},
  title     = {A Consolidated Open Knowledge Representation for Multiple Texts},
  booktitle = {Proceedings of the 2nd Workshop on Linking Models of Lexical, Sentential and Discourse-level Semantics},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {12--24},
  abstract  = {We propose to move from Open Information Extraction (OIE) ahead to Open
	Knowledge Representation (OKR), aiming to represent information conveyed
	jointly in a set of texts in an open text-based manner. We do so by
	consolidating OIE extractions using entity and predicate coreference, while
	modeling information containment between coreferring elements via lexical
	entailment. We suggest that generating OKR structures can be a useful step in
	the NLP pipeline, to give semantic applications an easy handle on consolidated
	information across multiple texts.},
  url       = {http://aclweb.org/anthology/W17-0902}
}

