@inproceedings{behnamghader-etal-2021-mg,
  title     = {{MG}-{BERT}: Multi-Graph Augmented {BERT} for Masked Language Modeling},
  author    = {BehnamGhader, Parishad and
               Zakerinia, Hossein and
               Soleymani Baghshah, Mahdieh},
  editor    = {Panchenko, Alexander and
               Malliaros, Fragkiskos D. and
               Logacheva, Varvara and
               Jana, Abhik and
               Ustalov, Dmitry and
               Jansen, Peter},
  booktitle = {Proceedings of the Fifteenth Workshop on Graph-Based Methods for Natural Language Processing ({TextGraphs}-15)},
  month     = jun,
  year      = {2021},
  address   = {Mexico City, Mexico},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.textgraphs-1.12},
  doi       = {10.18653/v1/2021.textgraphs-1.12},
  pages     = {125--131},
  abstract  = {Pre-trained models like Bidirectional Encoder Representations from Transformers (BERT) have recently made a big leap forward in Natural Language Processing (NLP) tasks. However, there are still some shortcomings in the Masked Language Modeling (MLM) task performed by these models. In this paper, we first introduce a multi-graph including different types of relations between words. Then, we propose the Multi-Graph augmented BERT (MG-BERT) model that is based on BERT. MG-BERT embeds tokens while taking advantage of a static multi-graph containing global word co-occurrences in the text corpus beside global real-world facts about words in knowledge graphs. The proposed model also employs a dynamic sentence graph to capture local context effectively. Experimental results demonstrate that our model can considerably enhance the performance in the MLM task.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="behnamghader-etal-2021-mg">
<titleInfo>
<title>MG-BERT: Multi-Graph Augmented BERT for Masked Language Modeling</title>
</titleInfo>
<name type="personal">
<namePart type="given">Parishad</namePart>
<namePart type="family">BehnamGhader</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hossein</namePart>
<namePart type="family">Zakerinia</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mahdieh</namePart>
<namePart type="family">Soleymani Baghshah</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fifteenth Workshop on Graph-Based Methods for Natural Language Processing (TextGraphs-15)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Alexander</namePart>
<namePart type="family">Panchenko</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Fragkiskos</namePart>
<namePart type="given">D</namePart>
<namePart type="family">Malliaros</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Varvara</namePart>
<namePart type="family">Logacheva</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Abhik</namePart>
<namePart type="family">Jana</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dmitry</namePart>
<namePart type="family">Ustalov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Peter</namePart>
<namePart type="family">Jansen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Mexico City, Mexico</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Pre-trained models like Bidirectional Encoder Representations from Transformers (BERT) have recently made a big leap forward in Natural Language Processing (NLP) tasks. However, there are still some shortcomings in the Masked Language Modeling (MLM) task performed by these models. In this paper, we first introduce a multi-graph including different types of relations between words. Then, we propose the Multi-Graph augmented BERT (MG-BERT) model that is based on BERT. MG-BERT embeds tokens while taking advantage of a static multi-graph containing global word co-occurrences in the text corpus beside global real-world facts about words in knowledge graphs. The proposed model also employs a dynamic sentence graph to capture local context effectively. Experimental results demonstrate that our model can considerably enhance the performance in the MLM task.</abstract>
<identifier type="citekey">behnamghader-etal-2021-mg</identifier>
<identifier type="doi">10.18653/v1/2021.textgraphs-1.12</identifier>
<location>
<url>https://aclanthology.org/2021.textgraphs-1.12</url>
</location>
<part>
<date>2021-06</date>
<extent unit="page">
<start>125</start>
<end>131</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T MG-BERT: Multi-Graph Augmented BERT for Masked Language Modeling
%A BehnamGhader, Parishad
%A Zakerinia, Hossein
%A Soleymani Baghshah, Mahdieh
%Y Panchenko, Alexander
%Y Malliaros, Fragkiskos D.
%Y Logacheva, Varvara
%Y Jana, Abhik
%Y Ustalov, Dmitry
%Y Jansen, Peter
%S Proceedings of the Fifteenth Workshop on Graph-Based Methods for Natural Language Processing (TextGraphs-15)
%D 2021
%8 June
%I Association for Computational Linguistics
%C Mexico City, Mexico
%F behnamghader-etal-2021-mg
%X Pre-trained models like Bidirectional Encoder Representations from Transformers (BERT) have recently made a big leap forward in Natural Language Processing (NLP) tasks. However, there are still some shortcomings in the Masked Language Modeling (MLM) task performed by these models. In this paper, we first introduce a multi-graph including different types of relations between words. Then, we propose the Multi-Graph augmented BERT (MG-BERT) model that is based on BERT. MG-BERT embeds tokens while taking advantage of a static multi-graph containing global word co-occurrences in the text corpus beside global real-world facts about words in knowledge graphs. The proposed model also employs a dynamic sentence graph to capture local context effectively. Experimental results demonstrate that our model can considerably enhance the performance in the MLM task.
%R 10.18653/v1/2021.textgraphs-1.12
%U https://aclanthology.org/2021.textgraphs-1.12
%U https://doi.org/10.18653/v1/2021.textgraphs-1.12
%P 125-131
Markdown (Informal)
[MG-BERT: Multi-Graph Augmented BERT for Masked Language Modeling](https://aclanthology.org/2021.textgraphs-1.12) (BehnamGhader et al., TextGraphs 2021)
ACL
- Parishad BehnamGhader, Hossein Zakerinia, and Mahdieh Soleymani Baghshah. 2021. MG-BERT: Multi-Graph Augmented BERT for Masked Language Modeling. In Proceedings of the Fifteenth Workshop on Graph-Based Methods for Natural Language Processing (TextGraphs-15), pages 125–131, Mexico City, Mexico. Association for Computational Linguistics.