@article{jaech-ostendorf-2018-low,
title = "Low-Rank {RNN} Adaptation for Context-Aware Language Modeling",
author = "Jaech, Aaron and
Ostendorf, Mari",
editor = "Lee, Lillian and
Johnson, Mark and
Toutanova, Kristina and
Roark, Brian",
journal = "Transactions of the Association for Computational Linguistics",
volume = "6",
year = "2018",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://aclanthology.org/Q18-1035",
doi = "10.1162/tacl_a_00035",
pages = "497--510",
abstract = "A context-aware language model uses location, user and/or domain metadata (context) to adapt its predictions. In neural language models, context information is typically represented as an embedding and it is given to the RNN as an additional input, which has been shown to be useful in many applications. We introduce a more powerful mechanism for using context to adapt an RNN by letting the context vector control a low-rank transformation of the recurrent layer weight matrix. Experiments show that allowing a greater fraction of the model parameters to be adjusted has benefits in terms of perplexity and classification for several different types of context.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="jaech-ostendorf-2018-low">
<titleInfo>
<title>Low-Rank RNN Adaptation for Context-Aware Language Modeling</title>
</titleInfo>
<name type="personal">
<namePart type="given">Aaron</namePart>
<namePart type="family">Jaech</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mari</namePart>
<namePart type="family">Ostendorf</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<genre authority="bibutilsgt">journal article</genre>
<relatedItem type="host">
<titleInfo>
<title>Transactions of the Association for Computational Linguistics</title>
</titleInfo>
<originInfo>
<issuance>continuing</issuance>
<publisher>MIT Press</publisher>
<place>
<placeTerm type="text">Cambridge, MA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">periodical</genre>
<genre authority="bibutilsgt">academic journal</genre>
</relatedItem>
<abstract>A context-aware language model uses location, user and/or domain metadata (context) to adapt its predictions. In neural language models, context information is typically represented as an embedding and it is given to the RNN as an additional input, which has been shown to be useful in many applications. We introduce a more powerful mechanism for using context to adapt an RNN by letting the context vector control a low-rank transformation of the recurrent layer weight matrix. Experiments show that allowing a greater fraction of the model parameters to be adjusted has benefits in terms of perplexity and classification for several different types of context.</abstract>
<identifier type="citekey">jaech-ostendorf-2018-low</identifier>
<identifier type="doi">10.1162/tacl_a_00035</identifier>
<location>
<url>https://aclanthology.org/Q18-1035</url>
</location>
<part>
<date>2018</date>
<detail type="volume"><number>6</number></detail>
<extent unit="page">
<start>497</start>
<end>510</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Journal Article
%T Low-Rank RNN Adaptation for Context-Aware Language Modeling
%A Jaech, Aaron
%A Ostendorf, Mari
%J Transactions of the Association for Computational Linguistics
%D 2018
%V 6
%I MIT Press
%C Cambridge, MA
%F jaech-ostendorf-2018-low
%X A context-aware language model uses location, user and/or domain metadata (context) to adapt its predictions. In neural language models, context information is typically represented as an embedding and it is given to the RNN as an additional input, which has been shown to be useful in many applications. We introduce a more powerful mechanism for using context to adapt an RNN by letting the context vector control a low-rank transformation of the recurrent layer weight matrix. Experiments show that allowing a greater fraction of the model parameters to be adjusted has benefits in terms of perplexity and classification for several different types of context.
%R 10.1162/tacl_a_00035
%U https://aclanthology.org/Q18-1035
%U https://doi.org/10.1162/tacl_a_00035
%P 497-510
Markdown (Informal)
[Low-Rank RNN Adaptation for Context-Aware Language Modeling](https://aclanthology.org/Q18-1035) (Jaech & Ostendorf, TACL 2018)
ACL
Aaron Jaech and Mari Ostendorf. 2018. [Low-Rank RNN Adaptation for Context-Aware Language Modeling](https://aclanthology.org/Q18-1035). Transactions of the Association for Computational Linguistics, 6:497–510.
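
For a concrete picture of the mechanism summarized in the abstract (the context vector controlling a low-rank transformation of the recurrent weight matrix), here is a minimal PyTorch sketch. It is an illustrative reconstruction, not the paper's exact FactorCell parameterization: the class name, layer sizes, initialization, and the use of a plain tanh RNN cell are assumptions made for the example.

```python
import torch
import torch.nn as nn


class LowRankAdaptedRNNCell(nn.Module):
    """Sketch of a context-adapted RNN cell: the context embedding c generates a
    rank-r additive update to the recurrent weight matrix, W' = W + L(c) R(c),
    so a larger fraction of the parameters is adjusted than when c is merely
    concatenated to the input."""

    def __init__(self, input_size: int, hidden_size: int, context_size: int, rank: int):
        super().__init__()
        self.hidden_size = hidden_size
        self.rank = rank
        in_dim = input_size + hidden_size
        # Base, context-independent recurrent parameters.
        self.W = nn.Parameter(torch.randn(in_dim, hidden_size) * 0.05)
        self.b = nn.Parameter(torch.zeros(hidden_size))
        # Linear maps from the context vector to the left/right low-rank factors.
        self.to_left = nn.Linear(context_size, in_dim * rank, bias=False)
        self.to_right = nn.Linear(context_size, rank * hidden_size, bias=False)

    def forward(self, x: torch.Tensor, h: torch.Tensor, c: torch.Tensor) -> torch.Tensor:
        batch, in_dim = x.shape[0], self.W.shape[0]
        # Context-dependent low-rank factors: (B, in_dim, r) and (B, r, hidden).
        left = self.to_left(c).view(batch, in_dim, self.rank)
        right = self.to_right(c).view(batch, self.rank, self.hidden_size)
        # Adapted weight matrix, one per batch element: (B, in_dim, hidden).
        w_adapted = self.W + left @ right
        xh = torch.cat([x, h], dim=-1).unsqueeze(1)              # (B, 1, in_dim)
        return torch.tanh((xh @ w_adapted).squeeze(1) + self.b)  # (B, hidden)


# Tiny usage example with made-up sizes.
cell = LowRankAdaptedRNNCell(input_size=32, hidden_size=64, context_size=8, rank=4)
x, h, c = torch.randn(5, 32), torch.zeros(5, 64), torch.randn(5, 8)
h_next = cell(x, h, c)  # shape (5, 64)
```

Keeping the rank r small limits the number of context-generated parameters while still letting the context reshape the recurrence itself, which is the trade-off the paper studies against simple input concatenation.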