@inproceedings{dakota-etal-2021-bidirectional,
title = "Bidirectional Domain Adaptation Using Weighted Multi-Task Learning",
author = {Dakota, Daniel and
Sayyed, Zeeshan Ali and
K{\"u}bler, Sandra},
booktitle = "Proceedings of the 17th International Conference on Parsing Technologies and the IWPT 2021 Shared Task on Parsing into Enhanced Universal Dependencies (IWPT 2021)",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.iwpt-1.10",
doi = "10.18653/v1/2021.iwpt-1.10",
pages = "93--105",
abstract = "Domain adaption in syntactic parsing is still a significant challenge. We address the issue of data imbalance between the in-domain and out-of-domain treebank typically used for the problem. We define domain adaptation as a Multi-task learning (MTL) problem, which allows us to train two parsers, one for each do-main. Our results show that the MTL approach is beneficial for the smaller treebank. For the larger treebank, we need to use loss weighting in order to avoid a decrease in performance be-low the single task. In order to determine towhat degree the data imbalance between two domains and the domain differences affect results, we also carry out an experiment with two imbalanced in-domain treebanks and show that loss weighting also improves performance in an in-domain setting. Given loss weighting in MTL, we can improve results for both parsers.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="dakota-etal-2021-bidirectional">
<titleInfo>
<title>Bidirectional Domain Adaptation Using Weighted Multi-Task Learning</title>
</titleInfo>
<name type="personal">
<namePart type="given">Daniel</namePart>
<namePart type="family">Dakota</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zeeshan</namePart>
<namePart type="given">Ali</namePart>
<namePart type="family">Sayyed</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sandra</namePart>
<namePart type="family">Kübler</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 17th International Conference on Parsing Technologies and the IWPT 2021 Shared Task on Parsing into Enhanced Universal Dependencies (IWPT 2021)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Domain adaptation in syntactic parsing is still a significant challenge. We address the issue of data imbalance between the in-domain and out-of-domain treebank typically used for the problem. We define domain adaptation as a multi-task learning (MTL) problem, which allows us to train two parsers, one for each domain. Our results show that the MTL approach is beneficial for the smaller treebank. For the larger treebank, we need to use loss weighting in order to avoid a decrease in performance below the single task. In order to determine to what degree the data imbalance between two domains and the domain differences affect results, we also carry out an experiment with two imbalanced in-domain treebanks and show that loss weighting also improves performance in an in-domain setting. Given loss weighting in MTL, we can improve results for both parsers.</abstract>
<identifier type="citekey">dakota-etal-2021-bidirectional</identifier>
<identifier type="doi">10.18653/v1/2021.iwpt-1.10</identifier>
<location>
<url>https://aclanthology.org/2021.iwpt-1.10</url>
</location>
<part>
<date>2021-08</date>
<extent unit="page">
<start>93</start>
<end>105</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Bidirectional Domain Adaptation Using Weighted Multi-Task Learning
%A Dakota, Daniel
%A Sayyed, Zeeshan Ali
%A Kübler, Sandra
%S Proceedings of the 17th International Conference on Parsing Technologies and the IWPT 2021 Shared Task on Parsing into Enhanced Universal Dependencies (IWPT 2021)
%D 2021
%8 August
%I Association for Computational Linguistics
%C Online
%F dakota-etal-2021-bidirectional
%X Domain adaptation in syntactic parsing is still a significant challenge. We address the issue of data imbalance between the in-domain and out-of-domain treebank typically used for the problem. We define domain adaptation as a multi-task learning (MTL) problem, which allows us to train two parsers, one for each domain. Our results show that the MTL approach is beneficial for the smaller treebank. For the larger treebank, we need to use loss weighting in order to avoid a decrease in performance below the single task. In order to determine to what degree the data imbalance between two domains and the domain differences affect results, we also carry out an experiment with two imbalanced in-domain treebanks and show that loss weighting also improves performance in an in-domain setting. Given loss weighting in MTL, we can improve results for both parsers.
%R 10.18653/v1/2021.iwpt-1.10
%U https://aclanthology.org/2021.iwpt-1.10
%U https://doi.org/10.18653/v1/2021.iwpt-1.10
%P 93-105
Markdown (Informal)
[Bidirectional Domain Adaptation Using Weighted Multi-Task Learning](https://aclanthology.org/2021.iwpt-1.10) (Dakota et al., IWPT 2021)
ACL
Daniel Dakota, Zeeshan Ali Sayyed, and Sandra Kübler. 2021. Bidirectional Domain Adaptation Using Weighted Multi-Task Learning. In Proceedings of the 17th International Conference on Parsing Technologies and the IWPT 2021 Shared Task on Parsing into Enhanced Universal Dependencies (IWPT 2021), pages 93–105, Online. Association for Computational Linguistics.
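
The abstract describes training two parsers, one per treebank, as a multi-task learning problem and weighting the per-task losses so the larger treebank does not drop below its single-task baseline. The sketch below is a minimal illustration of such a weighted two-task loss over a shared encoder; it is not the authors' code, and the class names, dimensions, task heads, and the weight `alpha` are all assumptions made for the example.

```python
# Minimal sketch (assumed setup, not the paper's implementation) of a
# two-parser multi-task model with a weighted combined loss.
import torch
import torch.nn as nn

class SharedEncoderMTLParser(nn.Module):
    """Shared encoder with one scoring head per treebank/domain."""
    def __init__(self, vocab_size=10000, hidden=256, num_labels=50):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, hidden)
        self.encoder = nn.LSTM(hidden, hidden, batch_first=True)
        self.head_in_domain = nn.Linear(hidden, num_labels)
        self.head_out_domain = nn.Linear(hidden, num_labels)

    def forward(self, tokens, domain):
        states, _ = self.encoder(self.embed(tokens))
        head = self.head_in_domain if domain == "in" else self.head_out_domain
        return head(states)  # per-token label scores

def weighted_mtl_loss(loss_large, loss_small, alpha=0.3):
    """Combine the two task losses; down-weighting one task (here via
    `alpha`, an illustrative value) is the loss-weighting idea the
    abstract refers to."""
    return (1.0 - alpha) * loss_large + alpha * loss_small

# Toy usage: a batch from the in-domain treebank.
model = SharedEncoderMTLParser()
tokens = torch.randint(0, 10000, (2, 7))
scores = model(tokens, domain="in")  # shape (2, 7, num_labels)
```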