@inproceedings{taslimipoor-etal-2019-cross,
title = "Cross-lingual Transfer Learning and Multitask Learning for Capturing Multiword Expressions",
author = "Taslimipoor, Shiva and
Rohanian, Omid and
Ha, Le An",
editor = "Savary, Agata and
Escart{\'\i}n, Carla Parra and
Bond, Francis and
Mitrovi{\'c}, Jelena and
Mititelu, Verginica Barbu",
booktitle = "Proceedings of the Joint Workshop on Multiword Expressions and WordNet (MWE-WN 2019)",
month = aug,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W19-5119",
doi = "10.18653/v1/W19-5119",
pages = "155--161",
abstract = "Recent developments in deep learning have prompted a surge of interest in the application of multitask and transfer learning to NLP problems. In this study, we explore for the first time, the application of transfer learning (TRL) and multitask learning (MTL) to the identification of Multiword Expressions (MWEs). For MTL, we exploit the shared syntactic information between MWE and dependency parsing models to jointly train a single model on both tasks. We specifically predict two types of labels: MWE and dependency parse. Our neural MTL architecture utilises the supervision of dependency parsing in lower layers and predicts MWE tags in upper layers. In the TRL scenario, we overcome the scarcity of data by learning a model on a larger MWE dataset and transferring the knowledge to a resource-poor setting in another language. In both scenarios, the resulting models achieved higher performance compared to standard neural approaches.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="taslimipoor-etal-2019-cross">
<titleInfo>
<title>Cross-lingual Transfer Learning and Multitask Learning for Capturing Multiword Expressions</title>
</titleInfo>
<name type="personal">
<namePart type="given">Shiva</namePart>
<namePart type="family">Taslimipoor</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Omid</namePart>
<namePart type="family">Rohanian</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Le</namePart>
<namePart type="given">An</namePart>
<namePart type="family">Ha</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Joint Workshop on Multiword Expressions and WordNet (MWE-WN 2019)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Agata</namePart>
<namePart type="family">Savary</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Carla</namePart>
<namePart type="given">Parra</namePart>
<namePart type="family">Escartín</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Francis</namePart>
<namePart type="family">Bond</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jelena</namePart>
<namePart type="family">Mitrović</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Verginica</namePart>
<namePart type="given">Barbu</namePart>
<namePart type="family">Mititelu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Florence, Italy</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Recent developments in deep learning have prompted a surge of interest in the application of multitask and transfer learning to NLP problems. In this study, we explore, for the first time, the application of transfer learning (TRL) and multitask learning (MTL) to the identification of Multiword Expressions (MWEs). For MTL, we exploit the shared syntactic information between MWE and dependency parsing models to jointly train a single model on both tasks. We specifically predict two types of labels: MWE and dependency parse. Our neural MTL architecture utilises the supervision of dependency parsing in lower layers and predicts MWE tags in upper layers. In the TRL scenario, we overcome the scarcity of data by learning a model on a larger MWE dataset and transferring the knowledge to a resource-poor setting in another language. In both scenarios, the resulting models achieved higher performance compared to standard neural approaches.</abstract>
<identifier type="citekey">taslimipoor-etal-2019-cross</identifier>
<identifier type="doi">10.18653/v1/W19-5119</identifier>
<location>
<url>https://aclanthology.org/W19-5119</url>
</location>
<part>
<date>2019-08</date>
<extent unit="page">
<start>155</start>
<end>161</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Cross-lingual Transfer Learning and Multitask Learning for Capturing Multiword Expressions
%A Taslimipoor, Shiva
%A Rohanian, Omid
%A Ha, Le An
%Y Savary, Agata
%Y Escartín, Carla Parra
%Y Bond, Francis
%Y Mitrović, Jelena
%Y Mititelu, Verginica Barbu
%S Proceedings of the Joint Workshop on Multiword Expressions and WordNet (MWE-WN 2019)
%D 2019
%8 August
%I Association for Computational Linguistics
%C Florence, Italy
%F taslimipoor-etal-2019-cross
%X Recent developments in deep learning have prompted a surge of interest in the application of multitask and transfer learning to NLP problems. In this study, we explore, for the first time, the application of transfer learning (TRL) and multitask learning (MTL) to the identification of Multiword Expressions (MWEs). For MTL, we exploit the shared syntactic information between MWE and dependency parsing models to jointly train a single model on both tasks. We specifically predict two types of labels: MWE and dependency parse. Our neural MTL architecture utilises the supervision of dependency parsing in lower layers and predicts MWE tags in upper layers. In the TRL scenario, we overcome the scarcity of data by learning a model on a larger MWE dataset and transferring the knowledge to a resource-poor setting in another language. In both scenarios, the resulting models achieved higher performance compared to standard neural approaches.
%R 10.18653/v1/W19-5119
%U https://aclanthology.org/W19-5119
%U https://doi.org/10.18653/v1/W19-5119
%P 155-161
Markdown (Informal)
[Cross-lingual Transfer Learning and Multitask Learning for Capturing Multiword Expressions](https://aclanthology.org/W19-5119) (Taslimipoor et al., MWE 2019)
ACL
Shiva Taslimipoor, Omid Rohanian, and Le An Ha. 2019. Cross-lingual Transfer Learning and Multitask Learning for Capturing Multiword Expressions. In Proceedings of the Joint Workshop on Multiword Expressions and WordNet (MWE-WN 2019), pages 155–161, Florence, Italy. Association for Computational Linguistics.
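
The abstract describes a layered multitask setup: dependency-label supervision applied to lower layers, with MWE tags predicted from upper layers. Below is a minimal illustrative sketch of that idea, not the authors' implementation; the PyTorch framing, layer sizes, tag inventories, and the 0.5 auxiliary-loss weight are all assumptions made here for the example.

```python
# Minimal sketch (not the paper's code) of layered multitask tagging:
# an auxiliary dependency-label head supervises the lower BiLSTM, and the
# main MWE-tag head sits on an upper BiLSTM stacked on top of it.
import torch
import torch.nn as nn

class MTLTagger(nn.Module):
    def __init__(self, vocab_size, n_dep_labels, n_mwe_tags,
                 emb_dim=100, hidden_dim=128):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, emb_dim)
        # Lower layer: receives dependency-label supervision (auxiliary task).
        self.lower = nn.LSTM(emb_dim, hidden_dim, batch_first=True,
                             bidirectional=True)
        self.dep_head = nn.Linear(2 * hidden_dim, n_dep_labels)
        # Upper layer: predicts MWE tags (main task) from the shared lower states.
        self.upper = nn.LSTM(2 * hidden_dim, hidden_dim, batch_first=True,
                             bidirectional=True)
        self.mwe_head = nn.Linear(2 * hidden_dim, n_mwe_tags)

    def forward(self, token_ids):
        x = self.embed(token_ids)
        lower_out, _ = self.lower(x)
        dep_logits = self.dep_head(lower_out)   # auxiliary: dependency labels
        upper_out, _ = self.upper(lower_out)
        mwe_logits = self.mwe_head(upper_out)   # main: MWE tags
        return dep_logits, mwe_logits

# Joint training sums the two token-level losses; the 0.5 weight is an assumption.
model = MTLTagger(vocab_size=10000, n_dep_labels=40, n_mwe_tags=5)
tokens = torch.randint(0, 10000, (2, 12))       # toy batch: 2 sentences x 12 tokens
dep_gold = torch.randint(0, 40, (2, 12))
mwe_gold = torch.randint(0, 5, (2, 12))
dep_logits, mwe_logits = model(tokens)
ce = nn.CrossEntropyLoss()
loss = ce(mwe_logits.reshape(-1, 5), mwe_gold.reshape(-1)) \
     + 0.5 * ce(dep_logits.reshape(-1, 40), dep_gold.reshape(-1))
loss.backward()
```

The design point the abstract emphasises is that the MWE head shares the syntactically supervised lower layer, so it can benefit from dependency signal even when MWE annotations are scarce.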