@inproceedings{wang-etal-2022-mico,
    title = "{MICO}: Selective Search with Mutual Information Co-training",
    author = "Wang, Zhanyu  and
      Zhang, Xiao  and
      Yun, Hyokun  and
      Teo, Choon Hui  and
      Chilimbi, Trishul",
    booktitle = "Proceedings of the 29th International Conference on Computational Linguistics",
    month = oct,
    year = "2022",
    address = "Gyeongju, Republic of Korea",
    publisher = "International Committee on Computational Linguistics",
    url = "https://aclanthology.org/2022.coling-1.102",
    pages = "1179--1192",
    abstract = "In contrast to traditional exhaustive search, in which a query is matched against every document in the collection, selective search first clusters the documents into several groups and then restricts each search to one group or only a few groups. Selective search is designed to reduce the latency and computation in modern large-scale search systems. In this study, we propose MICO, a \textbf{M}utual \textbf{I}nformation \textbf{CO}-training framework for selective search with minimal supervision using the search logs. After training, MICO not only clusters the documents, but also routes unseen queries to the relevant clusters for efficient retrieval. In our empirical experiments, MICO significantly improves the performance of selective search on multiple metrics and outperforms a number of existing competitive baselines.",
}
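The abstract above describes a concrete retrieval pattern: cluster documents offline, then route each query to a small number of clusters and search only there. The following is a minimal, generic sketch of that pattern in Python, not MICO's actual method; the plain k-means clustering, the vector inputs, and the dot-product scorer are all illustrative assumptions.

# A minimal, generic sketch of selective search -- not MICO's training
# procedure. Documents are clustered offline (plain k-means here, an
# illustrative choice) and each query is routed to its nearest
# cluster(s), so only that subset of documents is scored.
import numpy as np

def build_clusters(doc_vecs: np.ndarray, k: int, iters: int = 20):
    """Cluster document vectors with k-means; return the centroids
    and each document's cluster assignment."""
    rng = np.random.default_rng(0)
    centroids = doc_vecs[rng.choice(len(doc_vecs), size=k, replace=False)].copy()
    assign = np.zeros(len(doc_vecs), dtype=int)
    for _ in range(iters):
        dists = ((doc_vecs[:, None, :] - centroids[None, :, :]) ** 2).sum(-1)
        assign = dists.argmin(axis=1)
        for j in range(k):
            members = doc_vecs[assign == j]
            if len(members):
                centroids[j] = members.mean(axis=0)
    return centroids, assign

def selective_search(query_vec, doc_vecs, centroids, assign, n_probe=2):
    """Route the query to its n_probe closest clusters and score only
    the documents inside them, instead of the whole collection."""
    nearest = ((centroids - query_vec) ** 2).sum(-1).argsort()[:n_probe]
    candidates = np.flatnonzero(np.isin(assign, nearest))
    scores = doc_vecs[candidates] @ query_vec
    return candidates[scores.argsort()[::-1]]  # best-scoring docs first

With doc_vecs as an (N, dim) array and a compatible query_vec, selective_search scores only the documents in the probed clusters, which is where the latency and computation savings come from.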
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="wang-etal-2022-mico">
    <titleInfo>
      <title>MICO: Selective Search with Mutual Information Co-training</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Zhanyu</namePart>
      <namePart type="family">Wang</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Xiao</namePart>
      <namePart type="family">Zhang</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Hyokun</namePart>
      <namePart type="family">Yun</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Choon</namePart>
      <namePart type="given">Hui</namePart>
      <namePart type="family">Teo</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Trishul</namePart>
      <namePart type="family">Chilimbi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2022-10</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 29th International Conference on Computational Linguistics</title>
      </titleInfo>
      <originInfo>
        <publisher>International Committee on Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Gyeongju, Republic of Korea</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>In contrast to traditional exhaustive search, in which a query is matched against every document in the collection, selective search first clusters the documents into several groups and then restricts each search to one group or only a few groups. Selective search is designed to reduce the latency and computation in modern large-scale search systems. In this study, we propose MICO, a Mutual Information CO-training framework for selective search with minimal supervision using the search logs. After training, MICO not only clusters the documents, but also routes unseen queries to the relevant clusters for efficient retrieval. In our empirical experiments, MICO significantly improves the performance of selective search on multiple metrics and outperforms a number of existing competitive baselines.</abstract>
    <identifier type="citekey">wang-etal-2022-mico</identifier>
    <location>
      <url>https://aclanthology.org/2022.coling-1.102</url>
    </location>
    <part>
      <date>2022-10</date>
      <extent unit="page">
        <start>1179</start>
        <end>1192</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T MICO: Selective Search with Mutual Information Co-training
%A Wang, Zhanyu
%A Zhang, Xiao
%A Yun, Hyokun
%A Teo, Choon Hui
%A Chilimbi, Trishul
%S Proceedings of the 29th International Conference on Computational Linguistics
%D 2022
%8 October
%I International Committee on Computational Linguistics
%C Gyeongju, Republic of Korea
%F wang-etal-2022-mico
%X In contrast to traditional exhaustive search, in which a query is matched against every document in the collection, selective search first clusters the documents into several groups and then restricts each search to one group or only a few groups. Selective search is designed to reduce the latency and computation in modern large-scale search systems. In this study, we propose MICO, a Mutual Information CO-training framework for selective search with minimal supervision using the search logs. After training, MICO not only clusters the documents, but also routes unseen queries to the relevant clusters for efficient retrieval. In our empirical experiments, MICO significantly improves the performance of selective search on multiple metrics and outperforms a number of existing competitive baselines.
%U https://aclanthology.org/2022.coling-1.102
%P 1179-1192
Markdown (Informal)
[MICO: Selective Search with Mutual Information Co-training](https://aclanthology.org/2022.coling-1.102) (Wang et al., COLING 2022)

ACL
Zhanyu Wang, Xiao Zhang, Hyokun Yun, Choon Hui Teo, and Trishul Chilimbi. 2022. MICO: Selective Search with Mutual Information Co-training. In Proceedings of the 29th International Conference on Computational Linguistics, pages 1179–1192, Gyeongju, Republic of Korea. International Committee on Computational Linguistics.
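The title's "mutual information co-training" can also be made concrete with a short objective sketch. The loss below is a hedged reading of the abstract in the style of information-maximizing clustering objectives, not the paper's exact formulation: two K-way classifiers, one for queries and one for documents, are evaluated on (query, clicked-document) pairs from the search logs, and training maximizes the mutual information between their cluster assignments. The function names, batch construction, and PyTorch framing are assumptions.

# A hedged sketch of a mutual-information co-training objective, as the
# abstract describes it at a high level -- the paper's exact loss and
# architecture may differ. query_logits and doc_logits are assumed to be
# outputs of two co-trained K-way cluster classifiers evaluated on
# (query, clicked-document) pairs from search logs.
import torch
import torch.nn.functional as F

def neg_mutual_information(query_logits: torch.Tensor,
                           doc_logits: torch.Tensor) -> torch.Tensor:
    """Return -I(C_q; C_d), where C_q and C_d are the soft cluster
    assignments of paired queries and documents in the batch."""
    p_q = F.softmax(query_logits, dim=1)     # (B, K) query assignments
    p_d = F.softmax(doc_logits, dim=1)       # (B, K) document assignments
    joint = p_q.t() @ p_d / p_q.size(0)      # (K, K) empirical joint P(i, j)
    marg_q = joint.sum(dim=1, keepdim=True)  # (K, 1) marginal over queries
    marg_d = joint.sum(dim=0, keepdim=True)  # (1, K) marginal over documents
    eps = 1e-8                               # numerical floor for the logs
    mi = (joint * (torch.log(joint + eps)
                   - torch.log(marg_q + eps)
                   - torch.log(marg_d + eps))).sum()
    return -mi                               # minimize -MI to maximize MI

Minimizing this loss pushes each paired query and document toward the same cluster, while the marginal terms penalize collapsing all pairs into a single cluster; at inference, the query-side classifier routes unseen queries to clusters, matching the routing behavior the abstract describes.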