@inproceedings{min-2021-exploring,
title = "Exploring Pre-Trained Transformers and Bilingual Transfer Learning for {A}rabic Coreference Resolution",
author = "Min, Bonan",
editor = "Ogrodniczuk, Maciej and
Pradhan, Sameer and
Poesio, Massimo and
Grishina, Yulia and
Ng, Vincent",
booktitle = "Proceedings of the Fourth Workshop on Computational Models of Reference, Anaphora and Coreference",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.crac-1.10",
doi = "10.18653/v1/2021.crac-1.10",
pages = "94--99",
abstract = "In this paper, we develop bilingual transfer learning approaches to improve Arabic coreference resolution by leveraging additional English annotation via bilingual or multilingual pre-trained transformers. We show that bilingual transfer learning improves the strong transformer-based neural coreference models by 2-4 F1. We also systemically investigate the effectiveness of several pre-trained transformer models that differ in training corpora, languages covered, and model capacity. Our best model achieves a new state-of-the-art performance of 64.55 F1 on the Arabic OntoNotes dataset. Our code is publicly available at \url{https://github.com/bnmin/arabic_coref}.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="min-2021-exploring">
    <titleInfo>
      <title>Exploring Pre-Trained Transformers and Bilingual Transfer Learning for Arabic Coreference Resolution</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Bonan</namePart>
      <namePart type="family">Min</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fourth Workshop on Computational Models of Reference, Anaphora and Coreference</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Maciej</namePart>
        <namePart type="family">Ogrodniczuk</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Sameer</namePart>
        <namePart type="family">Pradhan</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Massimo</namePart>
        <namePart type="family">Poesio</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Yulia</namePart>
        <namePart type="family">Grishina</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Vincent</namePart>
        <namePart type="family">Ng</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Punta Cana, Dominican Republic</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>In this paper, we develop bilingual transfer learning approaches to improve Arabic coreference resolution by leveraging additional English annotation via bilingual or multilingual pre-trained transformers. We show that bilingual transfer learning improves strong transformer-based neural coreference models by 2-4 F1. We also systematically investigate the effectiveness of several pre-trained transformer models that differ in training corpora, languages covered, and model capacity. Our best model achieves a new state-of-the-art performance of 64.55 F1 on the Arabic OntoNotes dataset. Our code is publicly available at https://github.com/bnmin/arabic_coref.</abstract>
<identifier type="citekey">min-2021-exploring</identifier>
<identifier type="doi">10.18653/v1/2021.crac-1.10</identifier>
<location>
<url>https://aclanthology.org/2021.crac-1.10</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>94</start>
<end>99</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Exploring Pre-Trained Transformers and Bilingual Transfer Learning for Arabic Coreference Resolution
%A Min, Bonan
%Y Ogrodniczuk, Maciej
%Y Pradhan, Sameer
%Y Poesio, Massimo
%Y Grishina, Yulia
%Y Ng, Vincent
%S Proceedings of the Fourth Workshop on Computational Models of Reference, Anaphora and Coreference
%D 2021
%8 November
%I Association for Computational Linguistics
%C Punta Cana, Dominican Republic
%F min-2021-exploring
%X In this paper, we develop bilingual transfer learning approaches to improve Arabic coreference resolution by leveraging additional English annotation via bilingual or multilingual pre-trained transformers. We show that bilingual transfer learning improves strong transformer-based neural coreference models by 2-4 F1. We also systematically investigate the effectiveness of several pre-trained transformer models that differ in training corpora, languages covered, and model capacity. Our best model achieves a new state-of-the-art performance of 64.55 F1 on the Arabic OntoNotes dataset. Our code is publicly available at https://github.com/bnmin/arabic_coref.
%R 10.18653/v1/2021.crac-1.10
%U https://aclanthology.org/2021.crac-1.10
%U https://doi.org/10.18653/v1/2021.crac-1.10
%P 94-99
Markdown (Informal)
[Exploring Pre-Trained Transformers and Bilingual Transfer Learning for Arabic Coreference Resolution](https://aclanthology.org/2021.crac-1.10) (Min, CRAC 2021)
ACL
Bonan Min. 2021. Exploring Pre-Trained Transformers and Bilingual Transfer Learning for Arabic Coreference Resolution. In Proceedings of the Fourth Workshop on Computational Models of Reference, Anaphora and Coreference, pages 94–99, Punta Cana, Dominican Republic. Association for Computational Linguistics.