@inproceedings{mohamad-zamani-etal-2022-xlnet,
  title     = {{XLNET}-{GRU} Sentiment Regression Model for Cryptocurrency News in {E}nglish and {M}alay},
  author    = {Mohamad Zamani, Nur Azmina and
               Liew, Jasy Suet Yan and
               Yusof, Ahmad Muhyiddin},
  editor    = {El-Haj, Mahmoud and
               Rayson, Paul and
               Zmandar, Nadhem},
  booktitle = {Proceedings of the 4th Financial Narrative Processing Workshop @LREC2022},
  month     = jun,
  year      = {2022},
  address   = {Marseille, France},
  publisher = {European Language Resources Association},
  url       = {https://aclanthology.org/2022.fnp-1.5},
  pages     = {36--42},
  abstract  = {Contextual word embeddings such as the transformer language models are gaining popularity in text classification and analytics but have rarely been explored for sentiment analysis on cryptocurrency news particularly on languages other than English. Various state-of-the-art (SOTA) pre-trained language models have been introduced recently such as BERT, ALBERT, ELECTRA, RoBERTa, and XLNet for text representation. Hence, this study aims to investigate the performance of using Gated Recurrent Unit (GRU) with Generalized Autoregressive Pretraining for Language (XLNet) contextual word embedding for sentiment analysis on English and Malay cryptocurrency news (Bitcoin and Ethereum). We also compare the performance of our XLNet-GRU model against other SOTA pre-trained language models. Manually labelled corpora of English and Malay news are utilized to learn the context of text specifically in the cryptocurrency domain. Based on our experiments, we found that our XLNet-GRU sentiment regression model outperformed the lexicon-based baseline with mean adjusted R2 = 0.631 across Bitcoin and Ethereum for English and mean adjusted R2 = 0.514 for Malay.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="mohamad-zamani-etal-2022-xlnet">
<titleInfo>
<title>XLNET-GRU Sentiment Regression Model for Cryptocurrency News in English and Malay</title>
</titleInfo>
<name type="personal">
<namePart type="given">Nur</namePart>
<namePart type="given">Azmina</namePart>
<namePart type="family">Mohamad Zamani</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jasy</namePart>
<namePart type="given">Suet</namePart>
<namePart type="given">Yan</namePart>
<namePart type="family">Liew</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ahmad</namePart>
<namePart type="given">Muhyiddin</namePart>
<namePart type="family">Yusof</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 4th Financial Narrative Processing Workshop @LREC2022</title>
</titleInfo>
<name type="personal">
<namePart type="given">Mahmoud</namePart>
<namePart type="family">El-Haj</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Paul</namePart>
<namePart type="family">Rayson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nadhem</namePart>
<namePart type="family">Zmandar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>European Language Resources Association</publisher>
<place>
<placeTerm type="text">Marseille, France</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Contextual word embeddings such as the transformer language models are gaining popularity in text classification and analytics but have rarely been explored for sentiment analysis on cryptocurrency news particularly on languages other than English. Various state-of-the-art (SOTA) pre-trained language models have been introduced recently such as BERT, ALBERT, ELECTRA, RoBERTa, and XLNet for text representation. Hence, this study aims to investigate the performance of using Gated Recurrent Unit (GRU) with Generalized Autoregressive Pretraining for Language (XLNet) contextual word embedding for sentiment analysis on English and Malay cryptocurrency news (Bitcoin and Ethereum). We also compare the performance of our XLNet-GRU model against other SOTA pre-trained language models. Manually labelled corpora of English and Malay news are utilized to learn the context of text specifically in the cryptocurrency domain. Based on our experiments, we found that our XLNet-GRU sentiment regression model outperformed the lexicon-based baseline with mean adjusted R2 = 0.631 across Bitcoin and Ethereum for English and mean adjusted R2 = 0.514 for Malay.</abstract>
<identifier type="citekey">mohamad-zamani-etal-2022-xlnet</identifier>
<location>
<url>https://aclanthology.org/2022.fnp-1.5</url>
</location>
<part>
<date>2022-06</date>
<extent unit="page">
<start>36</start>
<end>42</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T XLNET-GRU Sentiment Regression Model for Cryptocurrency News in English and Malay
%A Mohamad Zamani, Nur Azmina
%A Liew, Jasy Suet Yan
%A Yusof, Ahmad Muhyiddin
%Y El-Haj, Mahmoud
%Y Rayson, Paul
%Y Zmandar, Nadhem
%S Proceedings of the 4th Financial Narrative Processing Workshop @LREC2022
%D 2022
%8 June
%I European Language Resources Association
%C Marseille, France
%F mohamad-zamani-etal-2022-xlnet
%X Contextual word embeddings such as the transformer language models are gaining popularity in text classification and analytics but have rarely been explored for sentiment analysis on cryptocurrency news particularly on languages other than English. Various state-of-the-art (SOTA) pre-trained language models have been introduced recently such as BERT, ALBERT, ELECTRA, RoBERTa, and XLNet for text representation. Hence, this study aims to investigate the performance of using Gated Recurrent Unit (GRU) with Generalized Autoregressive Pretraining for Language (XLNet) contextual word embedding for sentiment analysis on English and Malay cryptocurrency news (Bitcoin and Ethereum). We also compare the performance of our XLNet-GRU model against other SOTA pre-trained language models. Manually labelled corpora of English and Malay news are utilized to learn the context of text specifically in the cryptocurrency domain. Based on our experiments, we found that our XLNet-GRU sentiment regression model outperformed the lexicon-based baseline with mean adjusted R2 = 0.631 across Bitcoin and Ethereum for English and mean adjusted R2 = 0.514 for Malay.
%U https://aclanthology.org/2022.fnp-1.5
%P 36-42
Markdown (Informal)
[XLNET-GRU Sentiment Regression Model for Cryptocurrency News in English and Malay](https://aclanthology.org/2022.fnp-1.5) (Mohamad Zamani et al., FNP 2022)
ACL