BibTeX
@inproceedings{guo-etal-2021-pre,
title = "Pre-trained Transformer-based Classification and Span Detection Models for Social Media Health Applications",
author = "Guo, Yuting and
Ge, Yao and
Ali Al-Garadi, Mohammed and
Sarker, Abeed",
booktitle = "Proceedings of the Sixth Social Media Mining for Health ({\#}SMM4H) Workshop and Shared Task",
month = jun,
year = "2021",
address = "Mexico City, Mexico",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.smm4h-1.8",
doi = "10.18653/v1/2021.smm4h-1.8",
pages = "52--57",
abstract = "This paper describes our approach for six classification tasks (Tasks 1a, 3a, 3b, 4 and 5) and one span detection task (Task 1b) from the Social Media Mining for Health (SMM4H) 2021 shared tasks. We developed two separate systems for classification and span detection, both based on pre-trained Transformer-based models. In addition, we applied oversampling and classifier ensembling in the classification tasks. The results of our submissions are over the median scores in all tasks except for Task 1a. Furthermore, our model achieved first place in Task 4 and obtained a 7{\%} higher F1-score than the median in Task 1b.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="guo-etal-2021-pre">
<titleInfo>
<title>Pre-trained Transformer-based Classification and Span Detection Models for Social Media Health Applications</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yuting</namePart>
<namePart type="family">Guo</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yao</namePart>
<namePart type="family">Ge</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mohammed</namePart>
<namePart type="family">Ali Al-Garadi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Abeed</namePart>
<namePart type="family">Sarker</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Sixth Social Media Mining for Health (#SMM4H) Workshop and Shared Task</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Mexico City, Mexico</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes our approach for six classification tasks (Tasks 1a, 3a, 3b, 4 and 5) and one span detection task (Task 1b) from the Social Media Mining for Health (SMM4H) 2021 shared tasks. We developed two separate systems for classification and span detection, both based on pre-trained Transformer-based models. In addition, we applied oversampling and classifier ensembling in the classification tasks. The results of our submissions are over the median scores in all tasks except for Task 1a. Furthermore, our model achieved first place in Task 4 and obtained a 7% higher F1-score than the median in Task 1b.</abstract>
<identifier type="citekey">guo-etal-2021-pre</identifier>
<identifier type="doi">10.18653/v1/2021.smm4h-1.8</identifier>
<location>
<url>https://aclanthology.org/2021.smm4h-1.8</url>
</location>
<part>
<date>2021-06</date>
<extent unit="page">
<start>52</start>
<end>57</end>
</extent>
</part>
</mods>
</modsCollection>
Endnote
%0 Conference Proceedings
%T Pre-trained Transformer-based Classification and Span Detection Models for Social Media Health Applications
%A Guo, Yuting
%A Ge, Yao
%A Ali Al-Garadi, Mohammed
%A Sarker, Abeed
%S Proceedings of the Sixth Social Media Mining for Health (#SMM4H) Workshop and Shared Task
%D 2021
%8 June
%I Association for Computational Linguistics
%C Mexico City, Mexico
%F guo-etal-2021-pre
%X This paper describes our approach for six classification tasks (Tasks 1a, 3a, 3b, 4 and 5) and one span detection task (Task 1b) from the Social Media Mining for Health (SMM4H) 2021 shared tasks. We developed two separate systems for classification and span detection, both based on pre-trained Transformer-based models. In addition, we applied oversampling and classifier ensembling in the classification tasks. The results of our submissions are over the median scores in all tasks except for Task 1a. Furthermore, our model achieved first place in Task 4 and obtained a 7% higher F1-score than the median in Task 1b.
%R 10.18653/v1/2021.smm4h-1.8
%U https://aclanthology.org/2021.smm4h-1.8
%U https://doi.org/10.18653/v1/2021.smm4h-1.8
%P 52-57
Markdown (Informal)
[Pre-trained Transformer-based Classification and Span Detection Models for Social Media Health Applications](https://aclanthology.org/2021.smm4h-1.8) (Guo et al., SMM4H 2021)
ACL
Yuting Guo, Yao Ge, Mohammed Ali Al-Garadi, and Abeed Sarker. 2021. Pre-trained Transformer-based Classification and Span Detection Models for Social Media Health Applications. In Proceedings of the Sixth Social Media Mining for Health (#SMM4H) Workshop and Shared Task, pages 52–57, Mexico City, Mexico. Association for Computational Linguistics.