@inproceedings{kumar-sharma-2020-character,
title = "Character aware models with similarity learning for metaphor detection",
author = "Kumar, Tarun and
Sharma, Yashvardhan",
editor = "Klebanov, Beata Beigman and
Shutova, Ekaterina and
Lichtenstein, Patricia and
Muresan, Smaranda and
Wee, Chee and
Feldman, Anna and
Ghosh, Debanjan",
booktitle = "Proceedings of the Second Workshop on Figurative Language Processing",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.figlang-1.18",
doi = "10.18653/v1/2020.figlang-1.18",
pages = "116--125",
abstract = "Recent work on automatic sequential metaphor detection has involved recurrent neural networks initialized with different pre-trained word embeddings and which are sometimes combined with hand engineered features. To capture lexical and orthographic information automatically, in this paper we propose to add character based word representation. Also, to contrast the difference between literal and contextual meaning, we utilize a similarity network. We explore these components via two different architectures - a BiLSTM model and a Transformer Encoder model similar to BERT to perform metaphor identification. We participate in the Second Shared Task on Metaphor Detection on both the VUA and TOFEL datasets with the above models. The experimental results demonstrate the effectiveness of our method as it outperforms all the systems which participated in the previous shared task.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kumar-sharma-2020-character">
<titleInfo>
<title>Character aware models with similarity learning for metaphor detection</title>
</titleInfo>
<name type="personal">
<namePart type="given">Tarun</namePart>
<namePart type="family">Kumar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yashvardhan</namePart>
<namePart type="family">Sharma</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Second Workshop on Figurative Language Processing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Beata</namePart>
<namePart type="given">Beigman</namePart>
<namePart type="family">Klebanov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ekaterina</namePart>
<namePart type="family">Shutova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Patricia</namePart>
<namePart type="family">Lichtenstein</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Smaranda</namePart>
<namePart type="family">Muresan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Chee</namePart>
<namePart type="family">Wee</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Anna</namePart>
<namePart type="family">Feldman</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Debanjan</namePart>
<namePart type="family">Ghosh</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Recent work on automatic sequential metaphor detection has involved recurrent neural networks initialized with different pre-trained word embeddings, sometimes combined with hand-engineered features. To capture lexical and orthographic information automatically, in this paper we propose to add a character-based word representation. To contrast literal and contextual meaning, we also utilize a similarity network. We explore these components via two architectures for metaphor identification: a BiLSTM model and a Transformer Encoder model similar to BERT. We participate in the Second Shared Task on Metaphor Detection on both the VUA and TOEFL datasets with the above models. The experimental results demonstrate the effectiveness of our method, as it outperforms all the systems that participated in the previous shared task.</abstract>
<identifier type="citekey">kumar-sharma-2020-character</identifier>
<identifier type="doi">10.18653/v1/2020.figlang-1.18</identifier>
<location>
<url>https://aclanthology.org/2020.figlang-1.18</url>
</location>
<part>
<date>2020-07</date>
<extent unit="page">
<start>116</start>
<end>125</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Character aware models with similarity learning for metaphor detection
%A Kumar, Tarun
%A Sharma, Yashvardhan
%Y Klebanov, Beata Beigman
%Y Shutova, Ekaterina
%Y Lichtenstein, Patricia
%Y Muresan, Smaranda
%Y Wee, Chee
%Y Feldman, Anna
%Y Ghosh, Debanjan
%S Proceedings of the Second Workshop on Figurative Language Processing
%D 2020
%8 July
%I Association for Computational Linguistics
%C Online
%F kumar-sharma-2020-character
%X Recent work on automatic sequential metaphor detection has involved recurrent neural networks initialized with different pre-trained word embeddings, sometimes combined with hand-engineered features. To capture lexical and orthographic information automatically, in this paper we propose to add a character-based word representation. To contrast literal and contextual meaning, we also utilize a similarity network. We explore these components via two architectures for metaphor identification: a BiLSTM model and a Transformer Encoder model similar to BERT. We participate in the Second Shared Task on Metaphor Detection on both the VUA and TOEFL datasets with the above models. The experimental results demonstrate the effectiveness of our method, as it outperforms all the systems that participated in the previous shared task.
%R 10.18653/v1/2020.figlang-1.18
%U https://aclanthology.org/2020.figlang-1.18
%U https://doi.org/10.18653/v1/2020.figlang-1.18
%P 116-125
Markdown (Informal)
[Character aware models with similarity learning for metaphor detection](https://aclanthology.org/2020.figlang-1.18) (Kumar & Sharma, Fig-Lang 2020)
ACL
Tarun Kumar and Yashvardhan Sharma. 2020. Character aware models with similarity learning for metaphor detection. In Proceedings of the Second Workshop on Figurative Language Processing, pages 116–125, Online. Association for Computational Linguistics.
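
For readers skimming the abstract, a minimal sketch of the kind of model it describes may help: a character-level BiLSTM builds a character-based representation for each word, which is concatenated with a word embedding, passed through a word-level BiLSTM, and scored per token together with a similarity signal between the static ("literal") embedding and the contextual state. This is an illustrative PyTorch sketch under assumed dimensions and layer choices, not the authors' released implementation; the cosine-similarity feature here is only a stand-in for the paper's similarity network.

```python
# Illustrative sketch only (not the authors' code): a character-aware BiLSTM
# token classifier for metaphor detection. All sizes, names, and the use of a
# cosine-similarity feature are assumptions made for this example.
import torch
import torch.nn as nn
import torch.nn.functional as F


class CharAwareMetaphorTagger(nn.Module):
    def __init__(self, vocab_size, char_vocab_size,
                 word_dim=100, char_dim=30, char_hidden=25, hidden=128):
        super().__init__()
        self.word_emb = nn.Embedding(vocab_size, word_dim, padding_idx=0)
        self.char_emb = nn.Embedding(char_vocab_size, char_dim, padding_idx=0)
        # Character-level BiLSTM: one character-based vector per word.
        self.char_lstm = nn.LSTM(char_dim, char_hidden,
                                 bidirectional=True, batch_first=True)
        # Word-level BiLSTM over [word embedding ; char-based representation].
        self.word_lstm = nn.LSTM(word_dim + 2 * char_hidden, hidden,
                                 bidirectional=True, batch_first=True)
        # Project the contextual state back into word-embedding space so it can
        # be compared with the static embedding.
        self.proj = nn.Linear(2 * hidden, word_dim)
        # Classifier sees the contextual state plus one similarity scalar per token.
        self.out = nn.Linear(2 * hidden + 1, 2)

    def forward(self, word_ids, char_ids):
        # word_ids: (batch, seq_len); char_ids: (batch, seq_len, max_word_len)
        b, t, c = char_ids.shape
        w = self.word_emb(word_ids)                               # (b, t, word_dim)
        ch = self.char_emb(char_ids.view(b * t, c))               # (b*t, c, char_dim)
        _, (h, _) = self.char_lstm(ch)                            # h: (2, b*t, char_hidden)
        ch_word = torch.cat([h[0], h[1]], dim=-1).view(b, t, -1)  # (b, t, 2*char_hidden)
        ctx, _ = self.word_lstm(torch.cat([w, ch_word], dim=-1))  # (b, t, 2*hidden)
        # Similarity between the literal (static) embedding and contextual meaning.
        sim = F.cosine_similarity(self.proj(ctx), w, dim=-1).unsqueeze(-1)
        return self.out(torch.cat([ctx, sim], dim=-1))            # (b, t, 2) logits


if __name__ == "__main__":
    model = CharAwareMetaphorTagger(vocab_size=1000, char_vocab_size=80)
    words = torch.randint(1, 1000, (2, 7))    # 2 sentences, 7 tokens each
    chars = torch.randint(1, 80, (2, 7, 12))  # up to 12 characters per token
    logits = model(words, chars)
    print(logits.shape)  # torch.Size([2, 7, 2]): per-token metaphor/literal scores
```

A Transformer-encoder variant, as mentioned in the abstract, would replace the word-level BiLSTM with a pre-trained encoder such as BERT while keeping the same per-token classification head.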