@inproceedings{tran-cheng-2018-multiplicative,
title = "Multiplicative Tree-Structured Long Short-Term Memory Networks for Semantic Representations",
author = "Tran, Nam Khanh and
Cheng, Weiwei",
editor = "Nissim, Malvina and
Berant, Jonathan and
Lenci, Alessandro",
booktitle = "Proceedings of the Seventh Joint Conference on Lexical and Computational Semantics",
month = jun,
year = "2018",
address = "New Orleans, Louisiana",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/S18-2032",
doi = "10.18653/v1/S18-2032",
pages = "276--286",
abstract = "Tree-structured LSTMs have shown advantages in learning semantic representations by exploiting syntactic information. Most existing methods model tree structures by bottom-up combinations of constituent nodes using the same shared compositional function and often making use of input word information only. The inability to capture the richness of compositionality makes these models lack expressive power. In this paper, we propose multiplicative tree-structured LSTMs to tackle this problem. Our model makes use of not only word information but also relation information between words. It is more expressive, as different combination functions can be used for each child node. In addition to syntactic trees, we also investigate the use of Abstract Meaning Representation in tree-structured models, in order to incorporate both syntactic and semantic information from the sentence. Experimental results on common NLP tasks show the proposed models lead to better sentence representation and AMR brings benefits in complex tasks.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="tran-cheng-2018-multiplicative">
    <titleInfo>
      <title>Multiplicative Tree-Structured Long Short-Term Memory Networks for Semantic Representations</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Nam</namePart>
      <namePart type="given">Khanh</namePart>
      <namePart type="family">Tran</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Weiwei</namePart>
      <namePart type="family">Cheng</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2018-06</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Seventh Joint Conference on Lexical and Computational Semantics</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Malvina</namePart>
        <namePart type="family">Nissim</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Jonathan</namePart>
        <namePart type="family">Berant</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Alessandro</namePart>
        <namePart type="family">Lenci</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">New Orleans, Louisiana</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Tree-structured LSTMs have shown advantages in learning semantic representations by exploiting syntactic information. Most existing methods model tree structures by bottom-up combinations of constituent nodes using the same shared compositional function and often making use of input word information only. The inability to capture the richness of compositionality makes these models lack expressive power. In this paper, we propose multiplicative tree-structured LSTMs to tackle this problem. Our model makes use of not only word information but also relation information between words. It is more expressive, as different combination functions can be used for each child node. In addition to syntactic trees, we also investigate the use of Abstract Meaning Representation in tree-structured models, in order to incorporate both syntactic and semantic information from the sentence. Experimental results on common NLP tasks show the proposed models lead to better sentence representation and AMR brings benefits in complex tasks.</abstract>
    <identifier type="citekey">tran-cheng-2018-multiplicative</identifier>
    <identifier type="doi">10.18653/v1/S18-2032</identifier>
    <location>
      <url>https://aclanthology.org/S18-2032</url>
    </location>
    <part>
      <date>2018-06</date>
      <extent unit="page">
        <start>276</start>
        <end>286</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Multiplicative Tree-Structured Long Short-Term Memory Networks for Semantic Representations
%A Tran, Nam Khanh
%A Cheng, Weiwei
%Y Nissim, Malvina
%Y Berant, Jonathan
%Y Lenci, Alessandro
%S Proceedings of the Seventh Joint Conference on Lexical and Computational Semantics
%D 2018
%8 June
%I Association for Computational Linguistics
%C New Orleans, Louisiana
%F tran-cheng-2018-multiplicative
%X Tree-structured LSTMs have shown advantages in learning semantic representations by exploiting syntactic information. Most existing methods model tree structures by bottom-up combinations of constituent nodes using the same shared compositional function and often making use of input word information only. The inability to capture the richness of compositionality makes these models lack expressive power. In this paper, we propose multiplicative tree-structured LSTMs to tackle this problem. Our model makes use of not only word information but also relation information between words. It is more expressive, as different combination functions can be used for each child node. In addition to syntactic trees, we also investigate the use of Abstract Meaning Representation in tree-structured models, in order to incorporate both syntactic and semantic information from the sentence. Experimental results on common NLP tasks show the proposed models lead to better sentence representation and AMR brings benefits in complex tasks.
%R 10.18653/v1/S18-2032
%U https://aclanthology.org/S18-2032
%U https://doi.org/10.18653/v1/S18-2032
%P 276-286
Markdown (Informal)
[Multiplicative Tree-Structured Long Short-Term Memory Networks for Semantic Representations](https://aclanthology.org/S18-2032) (Tran & Cheng, *SEM 2018)
ACL
Nam Khanh Tran and Weiwei Cheng. 2018. Multiplicative Tree-Structured Long Short-Term Memory Networks for Semantic Representations. In Proceedings of the Seventh Joint Conference on Lexical and Computational Semantics, pages 276–286, New Orleans, Louisiana. Association for Computational Linguistics.
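For readers skimming the record, here is a minimal sketch of the idea the abstract describes: a tree-LSTM whose child combination is modulated multiplicatively by the relation between parent and child words, so each child effectively gets its own composition function rather than one shared one. The abstract gives no equations, so every detail below (the NumPy layout, the single shared forget gate, names like `mtree_lstm_node` and `E_r`, the example dependency labels) is an illustrative assumption, not the authors' formulation; see the paper at the DOI above for the actual model.

```python
# Hypothetical, simplified sketch of a relation-modulated ("multiplicative")
# tree-LSTM cell. Not the authors' implementation.
import numpy as np

rng = np.random.default_rng(0)
D = 8                                      # hidden / input size
RELS = {"nsubj": 0, "dobj": 1, "amod": 2}  # example dependency labels

W_x = rng.normal(0, 0.1, (4 * D, D))       # input weights for i, f, o, u gates
W_h = rng.normal(0, 0.1, (4 * D, D))       # shared child-hidden weights
# One embedding per relation; it rescales each child's hidden state
# multiplicatively, so each child gets its own effective combination function.
E_r = rng.normal(1, 0.1, (len(RELS), D))

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def mtree_lstm_node(x, children):
    """x: (D,) word vector; children: list of (relation, h, c) triples."""
    # Relation-modulated sum over children: E_r[rel] * h is the
    # multiplicative interaction standing in for per-child functions.
    h_tilde = sum((E_r[RELS[rel]] * h for rel, h, _ in children), np.zeros(D))
    g = W_x @ x + W_h @ h_tilde
    i, f, o = sigmoid(g[:D]), sigmoid(g[D:2 * D]), sigmoid(g[2 * D:3 * D])
    u = np.tanh(g[3 * D:])
    # One shared forget gate here for brevity; tree-LSTMs typically
    # compute a separate forget gate per child.
    c = i * u + f * sum((c for _, _, c in children), np.zeros(D))
    return o * np.tanh(c), c

# Leaves have no children; an internal node combines its labeled children.
h1, c1 = mtree_lstm_node(rng.normal(size=D), [])
h2, c2 = mtree_lstm_node(rng.normal(size=D), [])
h, c = mtree_lstm_node(rng.normal(size=D),
                       [("nsubj", h1, c1), ("dobj", h2, c2)])
print(h.shape, c.shape)  # (8,) (8,)
```

A real implementation would learn W_x, W_h, and E_r jointly in a deep-learning framework and, per the abstract, could build the tree from an Abstract Meaning Representation parse instead of a syntactic one.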