@inproceedings{cattan-etal-2021-cross,
title = "On the cross-lingual transferability of multilingual prototypical models across {NLU} tasks",
author = "Cattan, Oralie and
Rosset, Sophie and
Servan, Christophe",
editor = "Lee, Hung-Yi and
Mohtarami, Mitra and
Li, Shang-Wen and
Jin, Di and
Korpusik, Mandy and
Dong, Shuyan and
Vu, Ngoc Thang and
Hakkani-Tur, Dilek",
booktitle = "Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.metanlp-1.5",
doi = "10.18653/v1/2021.metanlp-1.5",
pages = "36--43",
abstract = "Supervised deep learning-based approaches have been applied to task-oriented dialog and have proven to be effective for limited domain and language applications when a sufficient number of training examples are available. In practice, these approaches suffer from the drawbacks of domain-driven design and under-resourced languages. Domain and language models are supposed to grow and change as the problem space evolves. On one hand, research on transfer learning has demonstrated the cross-lingual ability of multilingual Transformers-based models to learn semantically rich representations. On the other, in addition to the above approaches, meta-learning have enabled the development of task and language learning algorithms capable of far generalization. Through this context, this article proposes to investigate the cross-lingual transferability of using synergistically few-shot learning with prototypical neural networks and multilingual Transformers-based models. Experiments in natural language understanding tasks on MultiATIS++ corpus shows that our approach substantially improves the observed transfer learning performances between the low and the high resource languages. More generally our approach confirms that the meaningful latent space learned in a given language can be can be generalized to unseen and under-resourced ones using meta-learning.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="cattan-etal-2021-cross">
<titleInfo>
<title>On the cross-lingual transferability of multilingual prototypical models across NLU tasks</title>
</titleInfo>
<name type="personal">
<namePart type="given">Oralie</namePart>
<namePart type="family">Cattan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sophie</namePart>
<namePart type="family">Rosset</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Christophe</namePart>
<namePart type="family">Servan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Hung-Yi</namePart>
<namePart type="family">Lee</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mitra</namePart>
<namePart type="family">Mohtarami</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shang-Wen</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Di</namePart>
<namePart type="family">Jin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mandy</namePart>
<namePart type="family">Korpusik</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shuyan</namePart>
<namePart type="family">Dong</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ngoc</namePart>
<namePart type="given">Thang</namePart>
<namePart type="family">Vu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dilek</namePart>
<namePart type="family">Hakkani-Tur</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Supervised deep learning-based approaches have been applied to task-oriented dialog and have proven to be effective for limited domain and language applications when a sufficient number of training examples are available. In practice, these approaches suffer from the drawbacks of domain-driven design and under-resourced languages. Domain and language models are supposed to grow and change as the problem space evolves. On the one hand, research on transfer learning has demonstrated the cross-lingual ability of multilingual Transformer-based models to learn semantically rich representations. On the other, in addition to the above approaches, meta-learning has enabled the development of task and language learning algorithms capable of far generalization. In this context, this article investigates the cross-lingual transferability obtained by synergistically combining few-shot learning with prototypical neural networks and multilingual Transformer-based models. Experiments on natural language understanding tasks from the MultiATIS++ corpus show that our approach substantially improves the observed transfer learning performance between low- and high-resource languages. More generally, our approach confirms that the meaningful latent space learned in a given language can be generalized to unseen and under-resourced ones using meta-learning.</abstract>
<identifier type="citekey">cattan-etal-2021-cross</identifier>
<identifier type="doi">10.18653/v1/2021.metanlp-1.5</identifier>
<location>
<url>https://aclanthology.org/2021.metanlp-1.5</url>
</location>
<part>
<date>2021-08</date>
<extent unit="page">
<start>36</start>
<end>43</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T On the cross-lingual transferability of multilingual prototypical models across NLU tasks
%A Cattan, Oralie
%A Rosset, Sophie
%A Servan, Christophe
%Y Lee, Hung-Yi
%Y Mohtarami, Mitra
%Y Li, Shang-Wen
%Y Jin, Di
%Y Korpusik, Mandy
%Y Dong, Shuyan
%Y Vu, Ngoc Thang
%Y Hakkani-Tur, Dilek
%S Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing
%D 2021
%8 August
%I Association for Computational Linguistics
%C Online
%F cattan-etal-2021-cross
%X Supervised deep learning-based approaches have been applied to task-oriented dialog and have proven to be effective for limited domain and language applications when a sufficient number of training examples are available. In practice, these approaches suffer from the drawbacks of domain-driven design and under-resourced languages. Domain and language models are supposed to grow and change as the problem space evolves. On the one hand, research on transfer learning has demonstrated the cross-lingual ability of multilingual Transformer-based models to learn semantically rich representations. On the other, in addition to the above approaches, meta-learning has enabled the development of task and language learning algorithms capable of far generalization. In this context, this article investigates the cross-lingual transferability obtained by synergistically combining few-shot learning with prototypical neural networks and multilingual Transformer-based models. Experiments on natural language understanding tasks from the MultiATIS++ corpus show that our approach substantially improves the observed transfer learning performance between low- and high-resource languages. More generally, our approach confirms that the meaningful latent space learned in a given language can be generalized to unseen and under-resourced ones using meta-learning.
%R 10.18653/v1/2021.metanlp-1.5
%U https://aclanthology.org/2021.metanlp-1.5
%U https://doi.org/10.18653/v1/2021.metanlp-1.5
%P 36-43
Markdown (Informal)
[On the cross-lingual transferability of multilingual prototypical models across NLU tasks](https://aclanthology.org/2021.metanlp-1.5) (Cattan et al., MetaNLP 2021)
ACL
Oralie Cattan, Sophie Rosset, and Christophe Servan. 2021. On the cross-lingual transferability of multilingual prototypical models across NLU tasks. In Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing, pages 36–43, Online. Association for Computational Linguistics.
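
The abstract describes combining a multilingual Transformer encoder with prototypical networks for few-shot cross-lingual NLU. As a rough illustration of the prototypical-network step only (not the authors' implementation), the sketch below builds one prototype per class by averaging a few support embeddings and labels queries by nearest prototype; the embedding dimension, class count, and random placeholder embeddings are assumptions made so the example stays self-contained.

```python
# Illustrative prototypical-network classification (a sketch, not the paper's code).
# Embeddings would normally come from a multilingual Transformer encoder; here they
# are random placeholders so the example runs on its own.
import numpy as np

def prototypes(support_emb, support_labels, num_classes):
    """Average the support embeddings of each class into one prototype per class."""
    return np.stack([support_emb[support_labels == c].mean(axis=0)
                     for c in range(num_classes)])

def classify(query_emb, protos):
    """Assign each query to the class whose prototype is nearest in Euclidean distance."""
    dists = np.linalg.norm(query_emb[:, None, :] - protos[None, :, :], axis=-1)
    return dists.argmin(axis=1)

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    dim, num_classes, shots = 768, 3, 5               # hypothetical sizes
    support_emb = rng.normal(size=(num_classes * shots, dim))
    support_labels = np.repeat(np.arange(num_classes), shots)
    query_emb = rng.normal(size=(4, dim))
    protos = prototypes(support_emb, support_labels, num_classes)
    print(classify(query_emb, protos))                # predicted class index per query
```

With a shared multilingual encoder, the same nearest-prototype rule can be applied to queries in a language other than the one the support examples came from, which is the cross-lingual transfer setting the abstract refers to.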