@InProceedings{sorokin-gurevych:2017:EMNLP2017,
  author    = {Sorokin, Daniil and Gurevych, Iryna},
  title     = {Context-Aware Representations for Knowledge Base Relation Extraction},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = {September},
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1784--1789},
  abstract  = {We demonstrate that for sentence-level relation extraction it is
               beneficial to consider other relations in the sentential context
               while predicting the target relation. Our architecture uses an
               LSTM-based encoder to jointly learn representations for all
               relations in a single sentence. We combine the context
               representations with an attention mechanism to make the final
               prediction. We use the Wikidata knowledge base to construct a
               dataset of multiple relations per sentence and to evaluate our
               approach. Compared to a baseline system, our method results in an
               average error reduction of 24\% on a held-out set of relations.
               The code and the dataset to replicate the experiments are made
               available at https://github.com/ukplab/.},
  url       = {https://www.aclweb.org/anthology/D17-1188}
}
