@inproceedings{k-sarma-etal-2019-shallow,
title = "Shallow Domain Adaptive Embeddings for Sentiment Analysis",
author = "K Sarma, Prathusha and
Liang, Yingyu and
Sethares, William",
editor = "Inui, Kentaro and
Jiang, Jing and
Ng, Vincent and
Wan, Xiaojun",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D19-1557",
doi = "10.18653/v1/D19-1557",
pages = "5549--5558",
abstract = "This paper proposes a way to improve the performance of existing algorithms for text classification in domains with strong language semantics. A proposed domain adaptation layer learns weights to combine a generic and a domain specific (DS) word embedding into a domain adapted (DA) embedding. The DA word embeddings are then used as inputs to a generic encoder + classifier framework to perform a downstream task such as classification. This adaptation layer is particularly suited to data sets that are modest in size, and which are, therefore, not ideal candidates for (re)training a deep neural network architecture. Results on binary and multi-class classification tasks using popular encoder architectures, including current state-of-the-art methods (with and without the shallow adaptation layer) show the effectiveness of the proposed approach.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="k-sarma-etal-2019-shallow">
<titleInfo>
<title>Shallow Domain Adaptive Embeddings for Sentiment Analysis</title>
</titleInfo>
<name type="personal">
<namePart type="given">Prathusha</namePart>
<namePart type="family">K Sarma</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yingyu</namePart>
<namePart type="family">Liang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">William</namePart>
<namePart type="family">Sethares</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kentaro</namePart>
<namePart type="family">Inui</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jing</namePart>
<namePart type="family">Jiang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Vincent</namePart>
<namePart type="family">Ng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xiaojun</namePart>
<namePart type="family">Wan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Hong Kong, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper proposes a way to improve the performance of existing algorithms for text classification in domains with strong language semantics. A proposed domain adaptation layer learns weights to combine a generic and a domain specific (DS) word embedding into a domain adapted (DA) embedding. The DA word embeddings are then used as inputs to a generic encoder + classifier framework to perform a downstream task such as classification. This adaptation layer is particularly suited to data sets that are modest in size, and which are, therefore, not ideal candidates for (re)training a deep neural network architecture. Results on binary and multi-class classification tasks using popular encoder architectures, including current state-of-the-art methods (with and without the shallow adaptation layer) show the effectiveness of the proposed approach.</abstract>
<identifier type="citekey">k-sarma-etal-2019-shallow</identifier>
<identifier type="doi">10.18653/v1/D19-1557</identifier>
<location>
<url>https://aclanthology.org/D19-1557</url>
</location>
<part>
<date>2019-11</date>
<extent unit="page">
<start>5549</start>
<end>5558</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Shallow Domain Adaptive Embeddings for Sentiment Analysis
%A K Sarma, Prathusha
%A Liang, Yingyu
%A Sethares, William
%Y Inui, Kentaro
%Y Jiang, Jing
%Y Ng, Vincent
%Y Wan, Xiaojun
%S Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)
%D 2019
%8 November
%I Association for Computational Linguistics
%C Hong Kong, China
%F k-sarma-etal-2019-shallow
%X This paper proposes a way to improve the performance of existing algorithms for text classification in domains with strong language semantics. A proposed domain adaptation layer learns weights to combine a generic and a domain specific (DS) word embedding into a domain adapted (DA) embedding. The DA word embeddings are then used as inputs to a generic encoder + classifier framework to perform a downstream task such as classification. This adaptation layer is particularly suited to data sets that are modest in size, and which are, therefore, not ideal candidates for (re)training a deep neural network architecture. Results on binary and multi-class classification tasks using popular encoder architectures, including current state-of-the-art methods (with and without the shallow adaptation layer) show the effectiveness of the proposed approach.
%R 10.18653/v1/D19-1557
%U https://aclanthology.org/D19-1557
%U https://doi.org/10.18653/v1/D19-1557
%P 5549-5558
Markdown (Informal)
[Shallow Domain Adaptive Embeddings for Sentiment Analysis](https://aclanthology.org/D19-1557) (K Sarma et al., EMNLP-IJCNLP 2019)
ACL
Prathusha K Sarma, Yingyu Liang, and William Sethares. 2019. Shallow Domain Adaptive Embeddings for Sentiment Analysis. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pages 5549–5558, Hong Kong, China. Association for Computational Linguistics.
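
The abstract above describes a shallow adaptation layer that learns weights for combining a generic word embedding with a domain-specific (DS) embedding into a domain-adapted (DA) embedding, which is then fed to a standard encoder + classifier. Below is a minimal, hypothetical PyTorch sketch of that idea, assuming a per-token softmax-weighted convex combination of the two embeddings; the class name, scoring scheme, and dimensions are illustrative assumptions, not the authors' implementation.

```python
import torch
import torch.nn as nn


class ShallowAdaptationLayer(nn.Module):
    """Combine a generic and a domain-specific (DS) embedding of each token
    into a domain-adapted (DA) embedding via learned softmax weights.

    Hypothetical sketch only; the weighting scheme and dimensions are
    assumptions, not the exact formulation from the paper.
    """

    def __init__(self, dim: int):
        super().__init__()
        # One learned scoring vector per source embedding (generic, DS).
        self.score_generic = nn.Linear(dim, 1, bias=False)
        self.score_ds = nn.Linear(dim, 1, bias=False)

    def forward(self, generic: torch.Tensor, ds: torch.Tensor) -> torch.Tensor:
        # generic, ds: (batch, seq_len, dim)
        scores = torch.cat(
            [self.score_generic(generic), self.score_ds(ds)], dim=-1
        )                                                    # (batch, seq, 2)
        alpha = torch.softmax(scores, dim=-1).unsqueeze(-1)  # (batch, seq, 2, 1)
        stacked = torch.stack([generic, ds], dim=-2)         # (batch, seq, 2, dim)
        return (alpha * stacked).sum(dim=-2)                 # (batch, seq, dim)


if __name__ == "__main__":
    layer = ShallowAdaptationLayer(dim=300)
    g = torch.randn(2, 10, 300)   # e.g. generic (GloVe-style) vectors
    d = torch.randn(2, 10, 300)   # e.g. vectors trained on the target domain
    da = layer(g, d)              # DA embeddings to feed a downstream encoder
    print(da.shape)               # torch.Size([2, 10, 300])
```

Because only the small scoring layer is trained, an adaptation step of this kind adds very few parameters, which is consistent with the abstract's point about modest-size datasets that are poor candidates for (re)training a deep architecture.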