@inproceedings{anni-etal-2022-eventbert,
title = "{E}vent{BERT}: Incorporating Event-based Semantics for Natural Language Understanding",
author = "Zou, Anni and
Zhang, Zhuosheng and
Zhao, Hai",
editor = "Sun, Maosong and
Liu, Yang and
Che, Wanxiang and
Feng, Yang and
Qiu, Xipeng and
Rao, Gaoqi and
Chen, Yubo",
booktitle = "Proceedings of the 21st Chinese National Conference on Computational Linguistics",
month = oct,
year = "2022",
address = "Nanchang, China",
publisher = "Chinese Information Processing Society of China",
url = "https://aclanthology.org/2022.ccl-1.69",
pages = "774--785",
abstract = "Natural language understanding tasks require a comprehensive understanding of natural language and further reasoning about it, on the basis of holistic information at different levels to gain comprehensive knowledge. In recent years, pre-trained language models (PrLMs) have shown impressive performance in natural language understanding. However, they rely mainly on extracting context-sensitive statistical patterns without explicitly modeling linguistic information, such as semantic relationships entailed in natural language. In this work, we propose EventBERT, an event-based semantic representation model that takes BERT as the backbone and refines with event-based structural semantics in terms of graph convolution networks. EventBERT benefits simultaneously from rich event-based structures embodied in the graph and contextual semantics learned in pre-trained model BERT. Experimental results on the GLUE benchmark show that the proposed model consistently outperforms the baseline model.",
language = "English",
internal-note = {fixed swapped given/family author name parts (authors are Anni Zou, Zhuosheng Zhang, Hai Zhao); removed stray quote artifacts around abstract; citation key kept unchanged for stability},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="anni-etal-2022-eventbert">
<titleInfo>
<title>EventBERT: Incorporating Event-based Semantics for Natural Language Understanding</title>
</titleInfo>
<name type="personal">
<namePart type="given">Anni</namePart>
<namePart type="family">Zou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zhuosheng</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hai</namePart>
<namePart type="family">Zhao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<language>
<languageTerm type="text">English</languageTerm>
<languageTerm type="code" authority="iso639-2b">eng</languageTerm>
</language>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 21st Chinese National Conference on Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Maosong</namePart>
<namePart type="family">Sun</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Wanxiang</namePart>
<namePart type="family">Che</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Feng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xipeng</namePart>
<namePart type="family">Qiu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Gaoqi</namePart>
<namePart type="family">Rao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yubo</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Chinese Information Processing Society of China</publisher>
<place>
<placeTerm type="text">Nanchang, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Natural language understanding tasks require a comprehensive understanding of natural language and further reasoning about it, on the basis of holistic information at different levels to gain comprehensive knowledge. In recent years, pre-trained language models (PrLMs) have shown impressive performance in natural language understanding. However, they rely mainly on extracting context-sensitive statistical patterns without explicitly modeling linguistic information, such as semantic relationships entailed in natural language. In this work, we propose EventBERT, an event-based semantic representation model that takes BERT as the backbone and refines with event-based structural semantics in terms of graph convolution networks. EventBERT benefits simultaneously from rich event-based structures embodied in the graph and contextual semantics learned in pre-trained model BERT. Experimental results on the GLUE benchmark show that the proposed model consistently outperforms the baseline model.</abstract>
<identifier type="citekey">anni-etal-2022-eventbert</identifier>
<location>
<url>https://aclanthology.org/2022.ccl-1.69</url>
</location>
<part>
<date>2022-10</date>
<extent unit="page">
<start>774</start>
<end>785</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T EventBERT: Incorporating Event-based Semantics for Natural Language Understanding
%A Zou, Anni
%A Zhang, Zhuosheng
%A Zhao, Hai
%Y Sun, Maosong
%Y Liu, Yang
%Y Che, Wanxiang
%Y Feng, Yang
%Y Qiu, Xipeng
%Y Rao, Gaoqi
%Y Chen, Yubo
%S Proceedings of the 21st Chinese National Conference on Computational Linguistics
%D 2022
%8 October
%I Chinese Information Processing Society of China
%C Nanchang, China
%G English
%F anni-etal-2022-eventbert
%X Natural language understanding tasks require a comprehensive understanding of natural language and further reasoning about it, on the basis of holistic information at different levels to gain comprehensive knowledge. In recent years, pre-trained language models (PrLMs) have shown impressive performance in natural language understanding. However, they rely mainly on extracting context-sensitive statistical patterns without explicitly modeling linguistic information, such as semantic relationships entailed in natural language. In this work, we propose EventBERT, an event-based semantic representation model that takes BERT as the backbone and refines with event-based structural semantics in terms of graph convolution networks. EventBERT benefits simultaneously from rich event-based structures embodied in the graph and contextual semantics learned in pre-trained model BERT. Experimental results on the GLUE benchmark show that the proposed model consistently outperforms the baseline model.
%U https://aclanthology.org/2022.ccl-1.69
%P 774-785
Markdown (Informal)
[EventBERT: Incorporating Event-based Semantics for Natural Language Understanding](https://aclanthology.org/2022.ccl-1.69) (Zou et al., CCL 2022)
ACL