@inproceedings{elks-2021-using-transfer,
title = "Using Transfer Learning to Automatically Mark {L}2 Writing Texts",
author = "Elks, Tim",
editor = "Djabri, Souhila and
Gimadi, Dinara and
Mihaylova, Tsvetomila and
Nikolova-Koleva, Ivelina",
booktitle = "Proceedings of the Student Research Workshop Associated with RANLP 2021",
month = sep,
year = "2021",
address = "Online",
publisher = "INCOMA Ltd.",
url = "https://aclanthology.org/2021.ranlp-srw.8",
pages = "51--57",
abstract = "The use of transfer learning in Natural Language Processing (NLP) has grown over the last few years. Large, pre-trained neural networks based on the Transformer architecture are one example of this, achieving state-of-theart performance on several commonly used performance benchmarks, often when finetuned on a downstream task. Another form of transfer learning, Multitask Learning, has also been shown to improve performance in Natural Language Processing tasks and increase model robustness. This paper outlines preliminary findings of investigations into the impact of using pretrained language models alongside multitask fine-tuning to create an automated marking system of second language learners{'} written English. Using multiple transformer models and multiple datasets, this study compares different combinations of models and tasks and evaluates their impact on the performance of an automated marking system This presentation is a snap-shot of work being conducted as part of my dissertation for the University of Wolverhampton{'}s Computational Linguistics Masters{'} programme.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="elks-2021-using-transfer">
<titleInfo>
<title>Using Transfer Learning to Automatically Mark L2 Writing Texts</title>
</titleInfo>
<name type="personal">
<namePart type="given">Tim</namePart>
<namePart type="family">Elks</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Student Research Workshop Associated with RANLP 2021</title>
</titleInfo>
<name type="personal">
<namePart type="given">Souhila</namePart>
<namePart type="family">Djabri</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dinara</namePart>
<namePart type="family">Gimadi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tsvetomila</namePart>
<namePart type="family">Mihaylova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ivelina</namePart>
<namePart type="family">Nikolova-Koleva</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>INCOMA Ltd.</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
    <abstract>The use of transfer learning in Natural Language Processing (NLP) has grown over the last few years. Large, pre-trained neural networks based on the Transformer architecture are one example of this, achieving state-of-the-art performance on several commonly used benchmarks, often when fine-tuned on a downstream task. Another form of transfer learning, multitask learning, has also been shown to improve performance on NLP tasks and increase model robustness. This paper outlines preliminary findings from investigations into the impact of using pre-trained language models alongside multitask fine-tuning to create an automated marking system for second language learners’ written English. Using multiple transformer models and multiple datasets, this study compares different combinations of models and tasks and evaluates their impact on the performance of an automated marking system. This presentation is a snapshot of work being conducted as part of my dissertation for the University of Wolverhampton’s Computational Linguistics Master’s programme.</abstract>
<identifier type="citekey">elks-2021-using-transfer</identifier>
<location>
<url>https://aclanthology.org/2021.ranlp-srw.8</url>
</location>
<part>
<date>2021-09</date>
<extent unit="page">
<start>51</start>
<end>57</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Using Transfer Learning to Automatically Mark L2 Writing Texts
%A Elks, Tim
%Y Djabri, Souhila
%Y Gimadi, Dinara
%Y Mihaylova, Tsvetomila
%Y Nikolova-Koleva, Ivelina
%S Proceedings of the Student Research Workshop Associated with RANLP 2021
%D 2021
%8 September
%I INCOMA Ltd.
%C Online
%F elks-2021-using-transfer
%X The use of transfer learning in Natural Language Processing (NLP) has grown over the last few years. Large, pre-trained neural networks based on the Transformer architecture are one example of this, achieving state-of-the-art performance on several commonly used benchmarks, often when fine-tuned on a downstream task. Another form of transfer learning, multitask learning, has also been shown to improve performance on NLP tasks and increase model robustness. This paper outlines preliminary findings from investigations into the impact of using pre-trained language models alongside multitask fine-tuning to create an automated marking system for second language learners’ written English. Using multiple transformer models and multiple datasets, this study compares different combinations of models and tasks and evaluates their impact on the performance of an automated marking system. This presentation is a snapshot of work being conducted as part of my dissertation for the University of Wolverhampton’s Computational Linguistics Master’s programme.
%U https://aclanthology.org/2021.ranlp-srw.8
%P 51-57
Markdown (Informal)
[Using Transfer Learning to Automatically Mark L2 Writing Texts](https://aclanthology.org/2021.ranlp-srw.8) (Elks, RANLP 2021)
ACL
Tim Elks. 2021. Using Transfer Learning to Automatically Mark L2 Writing Texts. In Proceedings of the Student Research Workshop Associated with RANLP 2021, pages 51–57, Online. INCOMA Ltd.