@inproceedings{jiang-etal-2020-cross,
    title = "Cross-lingual Information Retrieval with {BERT}",
    author = "Jiang, Zhuolin and
      El-Jaroudi, Amro and
      Hartmann, William and
      Karakos, Damianos and
      Zhao, Lingjun",
    editor = "McKeown, Kathy and
      Oard, Douglas W. and
      {Elizabeth} and
      Schwartz, Richard",
    booktitle = "Proceedings of the workshop on Cross-Language Search and Summarization of Text and Speech (CLSSTS2020)",
    month = may,
    year = "2020",
    address = "Marseille, France",
    publisher = "European Language Resources Association",
    url = "https://aclanthology.org/2020.clssts-1.5",
    pages = "26--31",
    abstract = "Multiple neural language models have been developed recently, e.g., BERT and XLNet, and achieved impressive results in various NLP tasks including sentence classification, question answering and document ranking. In this paper, we explore the use of the popular bidirectional language model, BERT, to model and learn the relevance between English queries and foreign-language documents in the task of cross-lingual information retrieval. A deep relevance matching model based on BERT is introduced and trained by finetuning a pretrained multilingual BERT model with weak supervision, using home-made CLIR training data derived from parallel corpora. Experimental results of the retrieval of Lithuanian documents against short English queries show that our model is effective and outperforms the competitive baseline approaches.",
    language = "English",
    ISBN = "979-10-95546-55-9",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="jiang-etal-2020-cross">
<titleInfo>
<title>Cross-lingual Information Retrieval with BERT</title>
</titleInfo>
<name type="personal">
<namePart type="given">Zhuolin</namePart>
<namePart type="family">Jiang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Amro</namePart>
<namePart type="family">El-Jaroudi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">William</namePart>
<namePart type="family">Hartmann</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Damianos</namePart>
<namePart type="family">Karakos</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lingjun</namePart>
<namePart type="family">Zhao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-05</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<language>
<languageTerm type="text">English</languageTerm>
<languageTerm type="code" authority="iso639-2b">eng</languageTerm>
</language>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the workshop on Cross-Language Search and Summarization of Text and Speech (CLSSTS2020)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kathy</namePart>
<namePart type="family">McKeown</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Douglas</namePart>
<namePart type="given">W</namePart>
<namePart type="family">Oard</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name>
<namePart>Elizabeth</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Richard</namePart>
<namePart type="family">Schwartz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>European Language Resources Association</publisher>
<place>
<placeTerm type="text">Marseille, France</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-10-95546-55-9</identifier>
</relatedItem>
<abstract>Multiple neural language models have been developed recently, e.g., BERT and XLNet, and achieved impressive results in various NLP tasks including sentence classification, question answering and document ranking. In this paper, we explore the use of the popular bidirectional language model, BERT, to model and learn the relevance between English queries and foreign-language documents in the task of cross-lingual information retrieval. A deep relevance matching model based on BERT is introduced and trained by finetuning a pretrained multilingual BERT model with weak supervision, using home-made CLIR training data derived from parallel corpora. Experimental results of the retrieval of Lithuanian documents against short English queries show that our model is effective and outperforms the competitive baseline approaches.</abstract>
<identifier type="citekey">jiang-etal-2020-cross</identifier>
<location>
<url>https://aclanthology.org/2020.clssts-1.5</url>
</location>
<part>
<date>2020-05</date>
<extent unit="page">
<start>26</start>
<end>31</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Cross-lingual Information Retrieval with BERT
%A Jiang, Zhuolin
%A El-Jaroudi, Amro
%A Hartmann, William
%A Karakos, Damianos
%A Zhao, Lingjun
%Y McKeown, Kathy
%Y Oard, Douglas W.
%Y Elizabeth
%Y Schwartz, Richard
%S Proceedings of the workshop on Cross-Language Search and Summarization of Text and Speech (CLSSTS2020)
%D 2020
%8 May
%I European Language Resources Association
%C Marseille, France
%@ 979-10-95546-55-9
%G English
%F jiang-etal-2020-cross
%X Multiple neural language models have been developed recently, e.g., BERT and XLNet, and achieved impressive results in various NLP tasks including sentence classification, question answering and document ranking. In this paper, we explore the use of the popular bidirectional language model, BERT, to model and learn the relevance between English queries and foreign-language documents in the task of cross-lingual information retrieval. A deep relevance matching model based on BERT is introduced and trained by finetuning a pretrained multilingual BERT model with weak supervision, using home-made CLIR training data derived from parallel corpora. Experimental results of the retrieval of Lithuanian documents against short English queries show that our model is effective and outperforms the competitive baseline approaches.
%U https://aclanthology.org/2020.clssts-1.5
%P 26-31
Markdown (Informal)
[Cross-lingual Information Retrieval with BERT](https://aclanthology.org/2020.clssts-1.5) (Jiang et al., CLSSTS 2020)

ACL
Zhuolin Jiang, Amro El-Jaroudi, William Hartmann, Damianos Karakos, and Lingjun Zhao. 2020. Cross-lingual Information Retrieval with BERT. In Proceedings of the workshop on Cross-Language Search and Summarization of Text and Speech (CLSSTS2020), pages 26–31, Marseille, France. European Language Resources Association.
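
For readers who want to experiment with the approach the abstract outlines, below is a minimal sketch assuming the Hugging Face `transformers` library and the public `bert-base-multilingual-cased` checkpoint. It illustrates the general cross-encoder relevance-matching recipe (fine-tune multilingual BERT on weakly labeled query-passage pairs derived from parallel text); the example sentences, hyperparameters, and model choice are illustrative assumptions, not the authors' published setup.

```python
# Sketch of the cross-encoder relevance idea from the abstract: score an
# English query against a foreign-language passage with multilingual BERT,
# trained under weak supervision mined from a parallel corpus.
import torch
from transformers import BertTokenizer, BertForSequenceClassification

tokenizer = BertTokenizer.from_pretrained("bert-base-multilingual-cased")
model = BertForSequenceClassification.from_pretrained(
    "bert-base-multilingual-cased", num_labels=2
)

# Weak supervision from a (hypothetical) parallel corpus: an aligned
# English/Lithuanian pair counts as relevant (label 1); pairing the query
# with a random foreign sentence yields a negative example (label 0).
pairs = [
    ("economic sanctions", "Ekonominės sankcijos buvo pratęstos.", 1),
    ("economic sanctions", "Rytoj prognozuojamas lietus.", 0),
]

optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
model.train()
for query, passage, label in pairs:
    # Query and passage are packed into one input: [CLS] query [SEP] passage [SEP]
    inputs = tokenizer(query, passage, truncation=True,
                       max_length=256, return_tensors="pt")
    outputs = model(**inputs, labels=torch.tensor([label]))
    outputs.loss.backward()
    optimizer.step()
    optimizer.zero_grad()

model.eval()

def relevance(query: str, passage: str) -> float:
    """Probability that `passage` is relevant to the English `query`."""
    with torch.no_grad():
        inputs = tokenizer(query, passage, truncation=True,
                           max_length=256, return_tensors="pt")
        logits = model(**inputs).logits
    return torch.softmax(logits, dim=-1)[0, 1].item()

# Rank candidate Lithuanian passages for a short English query.
print(relevance("economic sanctions", "Ekonominės sankcijos buvo pratęstos."))
```

At retrieval time, one would apply `relevance` to each candidate document (or passage) and sort by score; in practice a first-stage retriever usually prunes the candidate pool first, since a cross-encoder scores one query-document pair per forward pass.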