@inproceedings{kabdolov-etal-2018-reproducing,
title = "Reproducing and Regularizing the {SCRN} Model",
author = "Kabdolov, Olzhas and
Assylbekov, Zhenisbek and
Takhanov, Rustem",
editor = "Bender, Emily M. and
Derczynski, Leon and
Isabelle, Pierre",
booktitle = "Proceedings of the 27th International Conference on Computational Linguistics",
month = aug,
year = "2018",
address = "Santa Fe, New Mexico, USA",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/C18-1145",
pages = "1705--1716",
abstract = "We reproduce the Structurally Constrained Recurrent Network (SCRN) model, and then regularize it using the existing widespread techniques, such as naive dropout, variational dropout, and weight tying. We show that when regularized and optimized appropriately the SCRN model can achieve performance comparable with the ubiquitous LSTM model in language modeling task on English data, while outperforming it on non-English data.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kabdolov-etal-2018-reproducing">
<titleInfo>
<title>Reproducing and Regularizing the SCRN Model</title>
</titleInfo>
<name type="personal">
<namePart type="given">Olzhas</namePart>
<namePart type="family">Kabdolov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zhenisbek</namePart>
<namePart type="family">Assylbekov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Rustem</namePart>
<namePart type="family">Takhanov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 27th International Conference on Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Emily</namePart>
<namePart type="given">M</namePart>
<namePart type="family">Bender</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Leon</namePart>
<namePart type="family">Derczynski</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pierre</namePart>
<namePart type="family">Isabelle</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Santa Fe, New Mexico, USA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We reproduce the Structurally Constrained Recurrent Network (SCRN) model, and then regularize it using the existing widespread techniques, such as naive dropout, variational dropout, and weight tying. We show that when regularized and optimized appropriately the SCRN model can achieve performance comparable with the ubiquitous LSTM model in language modeling task on English data, while outperforming it on non-English data.</abstract>
<identifier type="citekey">kabdolov-etal-2018-reproducing</identifier>
<location>
<url>https://aclanthology.org/C18-1145</url>
</location>
<part>
<date>2018-08</date>
<extent unit="page">
<start>1705</start>
<end>1716</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Reproducing and Regularizing the SCRN Model
%A Kabdolov, Olzhas
%A Assylbekov, Zhenisbek
%A Takhanov, Rustem
%Y Bender, Emily M.
%Y Derczynski, Leon
%Y Isabelle, Pierre
%S Proceedings of the 27th International Conference on Computational Linguistics
%D 2018
%8 August
%I Association for Computational Linguistics
%C Santa Fe, New Mexico, USA
%F kabdolov-etal-2018-reproducing
%X We reproduce the Structurally Constrained Recurrent Network (SCRN) model, and then regularize it using the existing widespread techniques, such as naive dropout, variational dropout, and weight tying. We show that when regularized and optimized appropriately the SCRN model can achieve performance comparable with the ubiquitous LSTM model in language modeling task on English data, while outperforming it on non-English data.
%U https://aclanthology.org/C18-1145
%P 1705-1716
Markdown (Informal)
[Reproducing and Regularizing the SCRN Model](https://aclanthology.org/C18-1145) (Kabdolov et al., COLING 2018)
ACL
- Olzhas Kabdolov, Zhenisbek Assylbekov, and Rustem Takhanov. 2018. Reproducing and Regularizing the SCRN Model. In Proceedings of the 27th International Conference on Computational Linguistics, pages 1705–1716, Santa Fe, New Mexico, USA. Association for Computational Linguistics.