@inproceedings{yuan-2018-neural,
    title = "Neural sequence modelling for learner error prediction",
    author = "Yuan, Zheng",
    editor = "Tetreault, Joel and
      Burstein, Jill and
      Kochmar, Ekaterina and
      Leacock, Claudia and
      Yannakoudakis, Helen",
    booktitle = "Proceedings of the Thirteenth Workshop on Innovative Use of {NLP} for Building Educational Applications",
    month = jun,
    year = "2018",
    address = "New Orleans, Louisiana",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W18-0547",
    doi = "10.18653/v1/W18-0547",
    pages = "381--388",
    abstract = "This paper describes our use of two recurrent neural network sequence models: sequence labelling and sequence-to-sequence models, for the prediction of future learner errors in our submission to the 2018 Duolingo Shared Task on Second Language Acquisition Modeling (SLAM). We show that these two models capture complementary information as combining them improves performance. Furthermore, the same network architecture and group of features can be used directly to build competitive prediction models in all three language tracks, demonstrating that our approach generalises well across languages.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="yuan-2018-neural">
    <titleInfo>
      <title>Neural sequence modelling for learner error prediction</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Zheng</namePart>
      <namePart type="family">Yuan</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2018-06</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Thirteenth Workshop on Innovative Use of NLP for Building Educational Applications</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Joel</namePart>
        <namePart type="family">Tetreault</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Jill</namePart>
        <namePart type="family">Burstein</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Ekaterina</namePart>
        <namePart type="family">Kochmar</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Claudia</namePart>
        <namePart type="family">Leacock</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Helen</namePart>
        <namePart type="family">Yannakoudakis</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">New Orleans, Louisiana</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>This paper describes our use of two recurrent neural network sequence models: sequence labelling and sequence-to-sequence models, for the prediction of future learner errors in our submission to the 2018 Duolingo Shared Task on Second Language Acquisition Modeling (SLAM). We show that these two models capture complementary information as combining them improves performance. Furthermore, the same network architecture and group of features can be used directly to build competitive prediction models in all three language tracks, demonstrating that our approach generalises well across languages.</abstract>
    <identifier type="citekey">yuan-2018-neural</identifier>
    <identifier type="doi">10.18653/v1/W18-0547</identifier>
    <location>
      <url>https://aclanthology.org/W18-0547</url>
    </location>
    <part>
      <date>2018-06</date>
      <extent unit="page">
        <start>381</start>
        <end>388</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Neural sequence modelling for learner error prediction
%A Yuan, Zheng
%Y Tetreault, Joel
%Y Burstein, Jill
%Y Kochmar, Ekaterina
%Y Leacock, Claudia
%Y Yannakoudakis, Helen
%S Proceedings of the Thirteenth Workshop on Innovative Use of NLP for Building Educational Applications
%D 2018
%8 June
%I Association for Computational Linguistics
%C New Orleans, Louisiana
%F yuan-2018-neural
%X This paper describes our use of two recurrent neural network sequence models: sequence labelling and sequence-to-sequence models, for the prediction of future learner errors in our submission to the 2018 Duolingo Shared Task on Second Language Acquisition Modeling (SLAM). We show that these two models capture complementary information as combining them improves performance. Furthermore, the same network architecture and group of features can be used directly to build competitive prediction models in all three language tracks, demonstrating that our approach generalises well across languages.
%R 10.18653/v1/W18-0547
%U https://aclanthology.org/W18-0547
%U https://doi.org/10.18653/v1/W18-0547
%P 381-388
Markdown (Informal)
[Neural sequence modelling for learner error prediction](https://aclanthology.org/W18-0547) (Yuan, BEA 2018)
ACL
Zheng Yuan. 2018. Neural sequence modelling for learner error prediction. In Proceedings of the Thirteenth Workshop on Innovative Use of NLP for Building Educational Applications, pages 381–388, New Orleans, Louisiana. Association for Computational Linguistics.
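Note: the abstract above reports that the sequence-labelling and sequence-to-sequence models capture complementary information and that combining them improves performance. As an informal illustration only (not the authors' code), the Python sketch below shows one generic way per-token error probabilities from two such models could be combined by linear interpolation; the function name, the interpolation weight, and the toy data are assumptions made for this example.

# Minimal illustrative sketch: combine per-token error probabilities from two
# hypothetical models (a sequence-labelling model and a sequence-to-sequence
# model). The 0.5 interpolation weight is an assumption, not a value from the paper.

from typing import List


def combine_error_probs(
    labelling_probs: List[float],  # P(error) per token from a sequence-labelling model
    seq2seq_probs: List[float],    # P(error) per token from a sequence-to-sequence model
    weight: float = 0.5,           # interpolation weight (assumed)
) -> List[float]:
    """Linearly interpolate two models' per-token error probabilities."""
    if len(labelling_probs) != len(seq2seq_probs):
        raise ValueError("both models must score the same token sequence")
    return [
        weight * p1 + (1.0 - weight) * p2
        for p1, p2 in zip(labelling_probs, seq2seq_probs)
    ]


if __name__ == "__main__":
    # Toy example: error probabilities for a five-token learner exercise.
    print(combine_error_probs([0.10, 0.80, 0.05, 0.40, 0.20],
                              [0.20, 0.60, 0.10, 0.50, 0.30]))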