@inproceedings{ruder-sil-2021-multi,
title = "Multi-Domain Multilingual Question Answering",
author = "Ruder, Sebastian and
Sil, Avi",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic {\&} Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.emnlp-tutorials.4",
doi = "10.18653/v1/2021.emnlp-tutorials.4",
pages = "17--21",
abstract = "Question answering (QA) is one of the most challenging and impactful tasks in natural language processing. Most research in QA, however, has focused on the open-domain or monolingual setting while most real-world applications deal with specific domains or languages. In this tutorial, we attempt to bridge this gap. Firstly, we introduce standard benchmarks in multi-domain and multilingual QA. In both scenarios, we discuss state-of-the-art approaches that achieve impressive performance, ranging from zero-shot transfer learning to out-of-the-box training with open-domain QA systems. Finally, we will present open research problems that this new research agenda poses such as multi-task learning, cross-lingual transfer learning, domain adaptation and training large scale pre-trained multilingual language models.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="ruder-sil-2021-multi">
<titleInfo>
<title>Multi-Domain Multilingual Question Answering</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sebastian</namePart>
<namePart type="family">Ruder</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Avi</namePart>
<namePart type="family">Sil</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Punta Cana, Dominican Republic &amp; Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Question answering (QA) is one of the most challenging and impactful tasks in natural language processing. Most research in QA, however, has focused on the open-domain or monolingual setting while most real-world applications deal with specific domains or languages. In this tutorial, we attempt to bridge this gap. Firstly, we introduce standard benchmarks in multi-domain and multilingual QA. In both scenarios, we discuss state-of-the-art approaches that achieve impressive performance, ranging from zero-shot transfer learning to out-of-the-box training with open-domain QA systems. Finally, we will present open research problems that this new research agenda poses such as multi-task learning, cross-lingual transfer learning, domain adaptation and training large scale pre-trained multilingual language models.</abstract>
<identifier type="citekey">ruder-sil-2021-multi</identifier>
<identifier type="doi">10.18653/v1/2021.emnlp-tutorials.4</identifier>
<location>
<url>https://aclanthology.org/2021.emnlp-tutorials.4</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>17</start>
<end>21</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Multi-Domain Multilingual Question Answering
%A Ruder, Sebastian
%A Sil, Avi
%S Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts
%D 2021
%8 November
%I Association for Computational Linguistics
%C Punta Cana, Dominican Republic & Online
%F ruder-sil-2021-multi
%X Question answering (QA) is one of the most challenging and impactful tasks in natural language processing. Most research in QA, however, has focused on the open-domain or monolingual setting while most real-world applications deal with specific domains or languages. In this tutorial, we attempt to bridge this gap. Firstly, we introduce standard benchmarks in multi-domain and multilingual QA. In both scenarios, we discuss state-of-the-art approaches that achieve impressive performance, ranging from zero-shot transfer learning to out-of-the-box training with open-domain QA systems. Finally, we will present open research problems that this new research agenda poses such as multi-task learning, cross-lingual transfer learning, domain adaptation and training large scale pre-trained multilingual language models.
%R 10.18653/v1/2021.emnlp-tutorials.4
%U https://aclanthology.org/2021.emnlp-tutorials.4
%U https://doi.org/10.18653/v1/2021.emnlp-tutorials.4
%P 17-21
Markdown (Informal)
[Multi-Domain Multilingual Question Answering](https://aclanthology.org/2021.emnlp-tutorials.4) (Ruder & Sil, EMNLP 2021)
ACL
- Sebastian Ruder and Avi Sil. 2021. Multi-Domain Multilingual Question Answering. In Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts, pages 17–21, Punta Cana, Dominican Republic & Online. Association for Computational Linguistics.