@inproceedings{jameel-schockaert-2017-modeling,
title = "Modeling Context Words as Regions: An Ordinal Regression Approach to Word Embedding",
author = "Jameel, Shoaib and
Schockaert, Steven",
editor = "Levy, Roger and
Specia, Lucia",
booktitle = "Proceedings of the 21st Conference on Computational Natural Language Learning ({C}o{NLL} 2017)",
month = aug,
year = "2017",
address = "Vancouver, Canada",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/K17-1014",
doi = "10.18653/v1/K17-1014",
pages = "123--133",
abstract = "Vector representations of word meaning have found many applications in the field of natural language processing. Word vectors intuitively represent the average context in which a given word tends to occur, but they cannot explicitly model the diversity of these contexts. Although region representations of word meaning offer a natural alternative to word vectors, only few methods have been proposed that can effectively learn word regions. In this paper, we propose a new word embedding model which is based on SVM regression. We show that the underlying ranking interpretation of word contexts is sufficient to match, and sometimes outperform, the performance of popular methods such as Skip-gram. Furthermore, we show that by using a quadratic kernel, we can effectively learn word regions, which outperform existing unsupervised models for the task of hypernym detection.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="jameel-schockaert-2017-modeling">
<titleInfo>
<title>Modeling Context Words as Regions: An Ordinal Regression Approach to Word Embedding</title>
</titleInfo>
<name type="personal">
<namePart type="given">Shoaib</namePart>
<namePart type="family">Jameel</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Steven</namePart>
<namePart type="family">Schockaert</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2017-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 21st Conference on Computational Natural Language Learning (CoNLL 2017)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Roger</namePart>
<namePart type="family">Levy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lucia</namePart>
<namePart type="family">Specia</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Vancouver, Canada</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Vector representations of word meaning have found many applications in the field of natural language processing. Word vectors intuitively represent the average context in which a given word tends to occur, but they cannot explicitly model the diversity of these contexts. Although region representations of word meaning offer a natural alternative to word vectors, only a few methods have been proposed that can effectively learn word regions. In this paper, we propose a new word embedding model that is based on SVM regression. We show that the underlying ranking interpretation of word contexts is sufficient to match, and sometimes outperform, popular methods such as Skip-gram. Furthermore, we show that by using a quadratic kernel, we can effectively learn word regions, which outperform existing unsupervised models for the task of hypernym detection.</abstract>
<identifier type="citekey">jameel-schockaert-2017-modeling</identifier>
<identifier type="doi">10.18653/v1/K17-1014</identifier>
<location>
<url>https://aclanthology.org/K17-1014</url>
</location>
<part>
<date>2017-08</date>
<extent unit="page">
<start>123</start>
<end>133</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Modeling Context Words as Regions: An Ordinal Regression Approach to Word Embedding
%A Jameel, Shoaib
%A Schockaert, Steven
%Y Levy, Roger
%Y Specia, Lucia
%S Proceedings of the 21st Conference on Computational Natural Language Learning (CoNLL 2017)
%D 2017
%8 August
%I Association for Computational Linguistics
%C Vancouver, Canada
%F jameel-schockaert-2017-modeling
%X Vector representations of word meaning have found many applications in the field of natural language processing. Word vectors intuitively represent the average context in which a given word tends to occur, but they cannot explicitly model the diversity of these contexts. Although region representations of word meaning offer a natural alternative to word vectors, only a few methods have been proposed that can effectively learn word regions. In this paper, we propose a new word embedding model that is based on SVM regression. We show that the underlying ranking interpretation of word contexts is sufficient to match, and sometimes outperform, popular methods such as Skip-gram. Furthermore, we show that by using a quadratic kernel, we can effectively learn word regions, which outperform existing unsupervised models for the task of hypernym detection.
%R 10.18653/v1/K17-1014
%U https://aclanthology.org/K17-1014
%U https://doi.org/10.18653/v1/K17-1014
%P 123-133
Markdown (Informal)
[Modeling Context Words as Regions: An Ordinal Regression Approach to Word Embedding](https://aclanthology.org/K17-1014) (Jameel & Schockaert, CoNLL 2017)
ACL
Shoaib Jameel and Steven Schockaert. 2017. Modeling Context Words as Regions: An Ordinal Regression Approach to Word Embedding. In Proceedings of the 21st Conference on Computational Natural Language Learning (CoNLL 2017), pages 123–133, Vancouver, Canada. Association for Computational Linguistics.
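
The abstract above describes learning a region per word with a quadratic kernel, so that a broader word's region can contain the representations of narrower words, which underlies the paper's unsupervised hypernym detection. The sketch below is only a loose illustration of that idea, not the authors' method: it substitutes scikit-learn's SVR with a degree-2 polynomial kernel for the paper's max-margin ordinal regression, and all vectors, ranks, thresholds, and the `in_region` helper are synthetic placeholders.

```python
# Illustrative sketch only: approximates the idea of carving out a word "region"
# from ranked context words via kernelized regression. The paper itself uses a
# max-margin ordinal regression formulation; SVR with a quadratic kernel is a
# stand-in, and the data below is entirely synthetic.
import numpy as np
from sklearn.svm import SVR

rng = np.random.default_rng(0)

# Toy setup: 2-D "context word" vectors and ordinal co-occurrence ranks for one
# target word (higher rank = context co-occurs more strongly with the target).
context_vectors = rng.normal(size=(200, 2))
distances = np.linalg.norm(context_vectors - np.array([1.0, 0.5]), axis=1)
ranks = np.digitize(-distances, bins=np.quantile(-distances, [0.25, 0.5, 0.75]))

# A degree-2 polynomial (quadratic) kernel lets the fitted score surface define
# an ellipsoid-like region of context space associated with the target word.
model = SVR(kernel="poly", degree=2, C=1.0)
model.fit(context_vectors, ranks.astype(float))

def in_region(vec, threshold=1.5):
    """Hypothetical membership test: a point belongs to the target word's
    region if its predicted rank exceeds a chosen threshold."""
    return model.predict(vec.reshape(1, -1))[0] >= threshold

# A crude hypernym-style check in the spirit of the abstract: a broader term's
# region should contain the vector of a narrower term, while unrelated points
# fall outside it.
print(in_region(np.array([1.0, 0.5])))    # near the typical context -> expected True
print(in_region(np.array([-3.0, -3.0])))  # far from it -> expected False
```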