@inproceedings{baumann-2019-multilingual,
title = "Multilingual Language Models for Named Entity Recognition in {G}erman and {E}nglish",
author = "Baumann, Antonia",
editor = "Kovatchev, Venelin and
Temnikova, Irina and
{\v{S}}andrih, Branislava and
Nikolova, Ivelina",
booktitle = "Proceedings of the Student Research Workshop Associated with RANLP 2019",
month = sep,
year = "2019",
address = "Varna, Bulgaria",
publisher = "INCOMA Ltd.",
url = "https://aclanthology.org/R19-2004",
doi = "10.26615/issn.2603-2821.2019_004",
pages = "21--27",
abstract = "We assess the language specificity of recent language models by exploring the potential of a multilingual language model. In particular, we evaluate Google{'}s multilingual BERT (mBERT) model on Named Entity Recognition (NER) in German and English. We expand the work on language model fine-tuning by Howard and Ruder (2018), applying it to the BERT architecture. We successfully reproduce the NER results published by Devlin et al. (2019).Our results show that the multilingual language model generalises well for NER in the chosen languages, matching the native model in English and comparing well with recent approaches for German. However, it does not benefit from the added fine-tuning methods.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="baumann-2019-multilingual">
<titleInfo>
<title>Multilingual Language Models for Named Entity Recognition in German and English</title>
</titleInfo>
<name type="personal">
<namePart type="given">Antonia</namePart>
<namePart type="family">Baumann</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Student Research Workshop Associated with RANLP 2019</title>
</titleInfo>
<name type="personal">
<namePart type="given">Venelin</namePart>
<namePart type="family">Kovatchev</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Irina</namePart>
<namePart type="family">Temnikova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Branislava</namePart>
<namePart type="family">Šandrih</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ivelina</namePart>
<namePart type="family">Nikolova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>INCOMA Ltd.</publisher>
<place>
<placeTerm type="text">Varna, Bulgaria</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
    <abstract>We assess the language specificity of recent language models by exploring the potential of a multilingual language model. In particular, we evaluate Google’s multilingual BERT (mBERT) model on Named Entity Recognition (NER) in German and English. We expand the work on language model fine-tuning by Howard and Ruder (2018), applying it to the BERT architecture. We successfully reproduce the NER results published by Devlin et al. (2019). Our results show that the multilingual language model generalises well for NER in the chosen languages, matching the native model in English and comparing well with recent approaches for German. However, it does not benefit from the added fine-tuning methods.</abstract>
<identifier type="citekey">baumann-2019-multilingual</identifier>
<identifier type="doi">10.26615/issn.2603-2821.2019_004</identifier>
<location>
<url>https://aclanthology.org/R19-2004</url>
</location>
<part>
<date>2019-09</date>
<extent unit="page">
<start>21</start>
<end>27</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Multilingual Language Models for Named Entity Recognition in German and English
%A Baumann, Antonia
%Y Kovatchev, Venelin
%Y Temnikova, Irina
%Y Šandrih, Branislava
%Y Nikolova, Ivelina
%S Proceedings of the Student Research Workshop Associated with RANLP 2019
%D 2019
%8 September
%I INCOMA Ltd.
%C Varna, Bulgaria
%F baumann-2019-multilingual
%X We assess the language specificity of recent language models by exploring the potential of a multilingual language model. In particular, we evaluate Google’s multilingual BERT (mBERT) model on Named Entity Recognition (NER) in German and English. We expand the work on language model fine-tuning by Howard and Ruder (2018), applying it to the BERT architecture. We successfully reproduce the NER results published by Devlin et al. (2019). Our results show that the multilingual language model generalises well for NER in the chosen languages, matching the native model in English and comparing well with recent approaches for German. However, it does not benefit from the added fine-tuning methods.
%R 10.26615/issn.2603-2821.2019_004
%U https://aclanthology.org/R19-2004
%U https://doi.org/10.26615/issn.2603-2821.2019_004
%P 21-27
Markdown (Informal)
[Multilingual Language Models for Named Entity Recognition in German and English](https://aclanthology.org/R19-2004) (Baumann, RANLP 2019)
ACL
Antonia Baumann. 2019. Multilingual Language Models for Named Entity Recognition in German and English. In Proceedings of the Student Research Workshop Associated with RANLP 2019, pages 21–27, Varna, Bulgaria. INCOMA Ltd.