@inproceedings{sorokin-gurevych-2017-context,
    title = "Context-Aware Representations for Knowledge Base Relation Extraction",
    author = "Sorokin, Daniil  and
      Gurevych, Iryna",
    editor = "Palmer, Martha  and
      Hwa, Rebecca  and
      Riedel, Sebastian",
    booktitle = "Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing",
    month = sep,
    year = "2017",
    address = "Copenhagen, Denmark",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/D17-1188/",
    doi = "10.18653/v1/D17-1188",
    pages = "1784--1789",
    abstract = "We demonstrate that for sentence-level relation extraction it is beneficial to consider other relations in the sentential context while predicting the target relation. Our architecture uses an LSTM-based encoder to jointly learn representations for all relations in a single sentence. We combine the context representations with an attention mechanism to make the final prediction. We use the Wikidata knowledge base to construct a dataset of multiple relations per sentence and to evaluate our approach. Compared to a baseline system, our method results in an average error reduction of 24 on a held-out set of relations. The code and the dataset to replicate the experiments are made available at \url{https://github.com/ukplab/}."
}<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="sorokin-gurevych-2017-context">
    <titleInfo>
        <title>Context-Aware Representations for Knowledge Base Relation Extraction</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Daniil</namePart>
        <namePart type="family">Sorokin</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Iryna</namePart>
        <namePart type="family">Gurevych</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2017-09</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
        <titleInfo>
            <title>Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing</title>
        </titleInfo>
        <name type="personal">
            <namePart type="given">Martha</namePart>
            <namePart type="family">Palmer</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Rebecca</namePart>
            <namePart type="family">Hwa</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Sebastian</namePart>
            <namePart type="family">Riedel</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <originInfo>
            <publisher>Association for Computational Linguistics</publisher>
            <place>
                <placeTerm type="text">Copenhagen, Denmark</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>We demonstrate that for sentence-level relation extraction it is beneficial to consider other relations in the sentential context while predicting the target relation. Our architecture uses an LSTM-based encoder to jointly learn representations for all relations in a single sentence. We combine the context representations with an attention mechanism to make the final prediction. We use the Wikidata knowledge base to construct a dataset of multiple relations per sentence and to evaluate our approach. Compared to a baseline system, our method results in an average error reduction of 24 on a held-out set of relations. The code and the dataset to replicate the experiments are made available at https://github.com/ukplab/.</abstract>
    <identifier type="citekey">sorokin-gurevych-2017-context</identifier>
    <identifier type="doi">10.18653/v1/D17-1188</identifier>
    <location>
        <url>https://aclanthology.org/D17-1188/</url>
    </location>
    <part>
        <date>2017-09</date>
        <extent unit="page">
            <start>1784</start>
            <end>1789</end>
        </extent>
    </part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Context-Aware Representations for Knowledge Base Relation Extraction
%A Sorokin, Daniil
%A Gurevych, Iryna
%Y Palmer, Martha
%Y Hwa, Rebecca
%Y Riedel, Sebastian
%S Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing
%D 2017
%8 September
%I Association for Computational Linguistics
%C Copenhagen, Denmark
%F sorokin-gurevych-2017-context
%X We demonstrate that for sentence-level relation extraction it is beneficial to consider other relations in the sentential context while predicting the target relation. Our architecture uses an LSTM-based encoder to jointly learn representations for all relations in a single sentence. We combine the context representations with an attention mechanism to make the final prediction. We use the Wikidata knowledge base to construct a dataset of multiple relations per sentence and to evaluate our approach. Compared to a baseline system, our method results in an average error reduction of 24% on a held-out set of relations. The code and the dataset to replicate the experiments are made available at https://github.com/ukplab/.
%R 10.18653/v1/D17-1188
%U https://aclanthology.org/D17-1188/
%U https://doi.org/10.18653/v1/D17-1188
%P 1784-1789
Markdown (Informal)
[Context-Aware Representations for Knowledge Base Relation Extraction](https://aclanthology.org/D17-1188/) (Sorokin & Gurevych, EMNLP 2017)
ACL
Daniil Sorokin and Iryna Gurevych. 2017. Context-Aware Representations for Knowledge Base Relation Extraction. In Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing, pages 1784–1789, Copenhagen, Denmark. Association for Computational Linguistics.
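
The abstract describes the model compactly: a shared LSTM encodes every relation candidate in a sentence, and an attention mechanism over the other candidates' representations informs the prediction for the target relation. Below is a minimal PyTorch sketch of that idea, for orientation only. The class name, layer sizes, the bilinear attention form, and the empty-context fallback are illustrative assumptions, not the authors' implementation (which, per the abstract, is released on GitHub).

```python
import torch
import torch.nn as nn

class ContextAttentionRelationClassifier(nn.Module):
    """Sketch of the abstract's idea: encode all relation candidates in a
    sentence with a shared LSTM, then attend over the other (context)
    candidates when classifying the target relation. Hyperparameters and
    attention form are illustrative, not the paper's exact architecture."""

    def __init__(self, vocab_size, num_relations, emb_dim=100, hidden_dim=128):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, emb_dim)
        self.encoder = nn.LSTM(emb_dim, hidden_dim, batch_first=True)
        self.attn = nn.Linear(hidden_dim, hidden_dim, bias=False)  # bilinear-style scoring
        self.classify = nn.Linear(2 * hidden_dim, num_relations)

    def encode(self, token_ids):
        # token_ids: (num_candidates, seq_len), one row per entity-pair
        # candidate; entity marking is assumed to happen upstream.
        _, (h, _) = self.encoder(self.embed(token_ids))
        return h.squeeze(0)  # (num_candidates, hidden_dim)

    def forward(self, token_ids, target_idx):
        reps = self.encode(token_ids)
        target = reps[target_idx]                      # (hidden_dim,)
        mask = torch.ones(reps.size(0), dtype=torch.bool)
        mask[target_idx] = False
        context = reps[mask]                           # other relations in the sentence
        if context.size(0) == 0:
            ctx_summary = torch.zeros_like(target)     # no sentential context available
        else:
            scores = context @ self.attn(target)       # attention weights over context
            weights = torch.softmax(scores, dim=0)
            ctx_summary = weights @ context            # weighted context representation
        return self.classify(torch.cat([target, ctx_summary]))

# Usage with toy inputs:
model = ContextAttentionRelationClassifier(vocab_size=10_000, num_relations=50)
token_ids = torch.randint(0, 10_000, (3, 12))  # 3 relation candidates, 12 tokens each
logits = model(token_ids, target_idx=0)        # relation scores for the first candidate
```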