@article{rothe-schutze-2017-autoextend,
title = "{A}uto{E}xtend: Combining Word Embeddings with Semantic Resources",
author = {Rothe, Sascha and
Sch{\"u}tze, Hinrich},
journal = "Computational Linguistics",
volume = "43",
number = "3",
month = sep,
year = "2017",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://aclanthology.org/J17-3004",
doi = "10.1162/COLI_a_00294",
pages = "593--617",
abstract = "We present AutoExtend, a system that combines word embeddings with semantic resources by learning embeddings for non-word objects like synsets and entities and learning word embeddings that incorporate the semantic information from the resource. The method is based on encoding and decoding the word embeddings and is flexible in that it can take any word embeddings as input and does not need an additional training corpus. The obtained embeddings live in the same vector space as the input word embeddings. A sparse tensor formalization guarantees efficiency and parallelizability. We use WordNet, GermaNet, and Freebase as semantic resources. AutoExtend achieves state-of-the-art performance on Word-in-Context Similarity and Word Sense Disambiguation tasks.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="rothe-schutze-2017-autoextend">
    <titleInfo>
        <title>AutoExtend: Combining Word Embeddings with Semantic Resources</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Sascha</namePart>
        <namePart type="family">Rothe</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Hinrich</namePart>
        <namePart type="family">Schütze</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2017-09</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <genre authority="bibutilsgt">journal article</genre>
    <relatedItem type="host">
        <titleInfo>
            <title>Computational Linguistics</title>
        </titleInfo>
        <originInfo>
            <issuance>continuing</issuance>
            <publisher>MIT Press</publisher>
            <place>
                <placeTerm type="text">Cambridge, MA</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">periodical</genre>
        <genre authority="bibutilsgt">academic journal</genre>
    </relatedItem>
    <abstract>We present AutoExtend, a system that combines word embeddings with semantic resources by learning embeddings for non-word objects like synsets and entities and learning word embeddings that incorporate the semantic information from the resource. The method is based on encoding and decoding the word embeddings and is flexible in that it can take any word embeddings as input and does not need an additional training corpus. The obtained embeddings live in the same vector space as the input word embeddings. A sparse tensor formalization guarantees efficiency and parallelizability. We use WordNet, GermaNet, and Freebase as semantic resources. AutoExtend achieves state-of-the-art performance on Word-in-Context Similarity and Word Sense Disambiguation tasks.</abstract>
    <identifier type="citekey">rothe-schutze-2017-autoextend</identifier>
    <identifier type="doi">10.1162/COLI_a_00294</identifier>
    <location>
        <url>https://aclanthology.org/J17-3004</url>
    </location>
    <part>
        <date>2017-09</date>
        <detail type="volume"><number>43</number></detail>
        <detail type="issue"><number>3</number></detail>
        <extent unit="page">
            <start>593</start>
            <end>617</end>
        </extent>
    </part>
</mods>
</modsCollection>

%0 Journal Article
%T AutoExtend: Combining Word Embeddings with Semantic Resources
%A Rothe, Sascha
%A Schütze, Hinrich
%J Computational Linguistics
%D 2017
%8 September
%V 43
%N 3
%I MIT Press
%C Cambridge, MA
%F rothe-schutze-2017-autoextend
%X We present AutoExtend, a system that combines word embeddings with semantic resources by learning embeddings for non-word objects like synsets and entities and learning word embeddings that incorporate the semantic information from the resource. The method is based on encoding and decoding the word embeddings and is flexible in that it can take any word embeddings as input and does not need an additional training corpus. The obtained embeddings live in the same vector space as the input word embeddings. A sparse tensor formalization guarantees efficiency and parallelizability. We use WordNet, GermaNet, and Freebase as semantic resources. AutoExtend achieves state-of-the-art performance on Word-in-Context Similarity and Word Sense Disambiguation tasks.
%R 10.1162/COLI_a_00294
%U https://aclanthology.org/J17-3004
%U https://doi.org/10.1162/COLI_a_00294
%P 593-617
Markdown (Informal)
[AutoExtend: Combining Word Embeddings with Semantic Resources](https://aclanthology.org/J17-3004) (Rothe & Schütze, CL 2017)
ACL
Sascha Rothe and Hinrich Schütze. 2017. AutoExtend: Combining Word Embeddings with Semantic Resources. Computational Linguistics, 43(3):593–617.