@inproceedings{nguyen-etal-2021-cross-domain,
    title = "Cross-Domain Language Modeling: An Empirical Investigation",
    author = "Nguyen, Vincent and
      Karimi, Sarvnaz and
      Rybinski, Maciej and
      Xing, Zhenchang",
    editor = "Rahimi, Afshin and
      Lane, William and
      Zuccon, Guido",
    booktitle = "Proceedings of the 19th Annual Workshop of the Australasian Language Technology Association",
    month = dec,
    year = "2021",
    address = "Online",
    publisher = "Australasian Language Technology Association",
    url = "https://aclanthology.org/2021.alta-1.22",
    pages = "192--200",
    abstract = "Transformer encoder models exhibit strong performance in single-domain applications. However, in a cross-domain situation, using a sub-word vocabulary model results in sub-word overlap. This is an issue when there is an overlap between sub-words that share no semantic similarity between domains. We hypothesize that alleviating this overlap allows for a more effective modeling of multi-domain tasks; we consider the biomedical and general domains in this paper. We present a study on reducing sub-word overlap by scaling the vocabulary size in a Transformer encoder model while pretraining with multiple domains. We observe a significant increase in downstream performance in the general-biomedical cross-domain from a reduction in sub-word overlap.",
}

<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="nguyen-etal-2021-cross-domain">
    <titleInfo>
      <title>Cross-Domain Language Modeling: An Empirical Investigation</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Vincent</namePart>
      <namePart type="family">Nguyen</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Sarvnaz</namePart>
      <namePart type="family">Karimi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Maciej</namePart>
      <namePart type="family">Rybinski</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Zhenchang</namePart>
      <namePart type="family">Xing</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-12</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 19th Annual Workshop of the Australasian Language Technology Association</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Afshin</namePart>
        <namePart type="family">Rahimi</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">William</namePart>
        <namePart type="family">Lane</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Guido</namePart>
        <namePart type="family">Zuccon</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Australasian Language Technology Association</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Transformer encoder models exhibit strong performance in single-domain applications. However, in a cross-domain situation, using a sub-word vocabulary model results in sub-word overlap. This is an issue when there is an overlap between sub-words that share no semantic similarity between domains. We hypothesize that alleviating this overlap allows for a more effective modeling of multi-domain tasks; we consider the biomedical and general domains in this paper. We present a study on reducing sub-word overlap by scaling the vocabulary size in a Transformer encoder model while pretraining with multiple domains. We observe a significant increase in downstream performance in the general-biomedical cross-domain from a reduction in sub-word overlap.</abstract>
    <identifier type="citekey">nguyen-etal-2021-cross-domain</identifier>
    <location>
      <url>https://aclanthology.org/2021.alta-1.22</url>
    </location>
    <part>
      <date>2021-12</date>
      <extent unit="page">
        <start>192</start>
        <end>200</end>
      </extent>
    </part>
  </mods>
</modsCollection>

%0 Conference Proceedings
%T Cross-Domain Language Modeling: An Empirical Investigation
%A Nguyen, Vincent
%A Karimi, Sarvnaz
%A Rybinski, Maciej
%A Xing, Zhenchang
%Y Rahimi, Afshin
%Y Lane, William
%Y Zuccon, Guido
%S Proceedings of the 19th Annual Workshop of the Australasian Language Technology Association
%D 2021
%8 December
%I Australasian Language Technology Association
%C Online
%F nguyen-etal-2021-cross-domain
%X Transformer encoder models exhibit strong performance in single-domain applications. However, in a cross-domain situation, using a sub-word vocabulary model results in sub-word overlap. This is an issue when there is an overlap between sub-words that share no semantic similarity between domains. We hypothesize that alleviating this overlap allows for a more effective modeling of multi-domain tasks; we consider the biomedical and general domains in this paper. We present a study on reducing sub-word overlap by scaling the vocabulary size in a Transformer encoder model while pretraining with multiple domains. We observe a significant increase in downstream performance in the general-biomedical cross-domain from a reduction in sub-word overlap.
%U https://aclanthology.org/2021.alta-1.22
%P 192-200

Markdown (Informal)

[Cross-Domain Language Modeling: An Empirical Investigation](https://aclanthology.org/2021.alta-1.22) (Nguyen et al., ALTA 2021)

ACL

- Vincent Nguyen, Sarvnaz Karimi, Maciej Rybinski, and Zhenchang Xing. 2021. Cross-Domain Language Modeling: An Empirical Investigation. In Proceedings of the 19th Annual Workshop of the Australasian Language Technology Association, pages 192–200, Online. Australasian Language Technology Association.