@inproceedings{peng-etal-2021-domain,
title = "Is Domain Adaptation Worth Your Investment? Comparing {BERT} and {F}in{BERT} on Financial Tasks",
author = "Peng, Bo and
Chersoni, Emmanuele and
Hsu, Yu-Yin and
Huang, Chu-Ren",
editor = "Hahn, Udo and
Hoste, Veronique and
Stent, Amanda",
booktitle = "Proceedings of the Third Workshop on Economics and Natural Language Processing",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.econlp-1.5",
doi = "10.18653/v1/2021.econlp-1.5",
pages = "37--44",
abstract = "With the recent rise in popularity of Transformer models in Natural Language Processing, research efforts have been dedicated to the development of domain-adapted versions of BERT-like architectures. In this study, we focus on FinBERT, a Transformer model trained on text from the financial domain. By comparing its performances with the original BERT on a wide variety of financial text processing tasks, we found continual pretraining from the original model to be the more beneficial option. Domain-specific pretraining from scratch, conversely, seems to be less effective.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="peng-etal-2021-domain">
<titleInfo>
<title>Is Domain Adaptation Worth Your Investment? Comparing BERT and FinBERT on Financial Tasks</title>
</titleInfo>
<name type="personal">
<namePart type="given">Bo</namePart>
<namePart type="family">Peng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Emmanuele</namePart>
<namePart type="family">Chersoni</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yu-Yin</namePart>
<namePart type="family">Hsu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Chu-Ren</namePart>
<namePart type="family">Huang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Third Workshop on Economics and Natural Language Processing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Udo</namePart>
<namePart type="family">Hahn</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Veronique</namePart>
<namePart type="family">Hoste</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Amanda</namePart>
<namePart type="family">Stent</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Punta Cana, Dominican Republic</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
    <abstract>With the recent rise in popularity of Transformer models in Natural Language Processing, research efforts have been dedicated to the development of domain-adapted versions of BERT-like architectures. In this study, we focus on FinBERT, a Transformer model trained on text from the financial domain. By comparing its performance with the original BERT on a wide variety of financial text processing tasks, we found continual pretraining from the original model to be the more beneficial option. Domain-specific pretraining from scratch, conversely, seems to be less effective.</abstract>
<identifier type="citekey">peng-etal-2021-domain</identifier>
<identifier type="doi">10.18653/v1/2021.econlp-1.5</identifier>
<location>
<url>https://aclanthology.org/2021.econlp-1.5</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>37</start>
<end>44</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Is Domain Adaptation Worth Your Investment? Comparing BERT and FinBERT on Financial Tasks
%A Peng, Bo
%A Chersoni, Emmanuele
%A Hsu, Yu-Yin
%A Huang, Chu-Ren
%Y Hahn, Udo
%Y Hoste, Veronique
%Y Stent, Amanda
%S Proceedings of the Third Workshop on Economics and Natural Language Processing
%D 2021
%8 November
%I Association for Computational Linguistics
%C Punta Cana, Dominican Republic
%F peng-etal-2021-domain
%X With the recent rise in popularity of Transformer models in Natural Language Processing, research efforts have been dedicated to the development of domain-adapted versions of BERT-like architectures. In this study, we focus on FinBERT, a Transformer model trained on text from the financial domain. By comparing its performance with the original BERT on a wide variety of financial text processing tasks, we found continual pretraining from the original model to be the more beneficial option. Domain-specific pretraining from scratch, conversely, seems to be less effective.
%R 10.18653/v1/2021.econlp-1.5
%U https://aclanthology.org/2021.econlp-1.5
%U https://doi.org/10.18653/v1/2021.econlp-1.5
%P 37-44
Markdown (Informal)
[Is Domain Adaptation Worth Your Investment? Comparing BERT and FinBERT on Financial Tasks](https://aclanthology.org/2021.econlp-1.5) (Peng et al., ECONLP 2021)
ACL
Bo Peng, Emmanuele Chersoni, Yu-Yin Hsu, and Chu-Ren Huang. 2021. Is Domain Adaptation Worth Your Investment? Comparing BERT and FinBERT on Financial Tasks. In Proceedings of the Third Workshop on Economics and Natural Language Processing, pages 37–44, Punta Cana, Dominican Republic. Association for Computational Linguistics.