@inproceedings{jiang-2023-tadi,
title = "{TADI}: Topic-aware Attention and Powerful Dual-encoder Interaction for Recall in News Recommendation",
author = "Jiang, Junxiang",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2023.findings-emnlp.1047",
doi = "10.18653/v1/2023.findings-emnlp.1047",
pages = "15647--15658",
abstract = "News recommendation is one of the widest commercialization in natural language processing research area, which aims to recommend news according to user interests. New recall plays an important role in news recommendation. It is to recall candidates from a very large news database. Recent researches of news recall mostly adopt dual-encoder architecture as it provides a much faster recall scheme, and they encode each word equally. However, these works remain two challenges: irrelevant word distraction and weak dual-encoder interaction. Therefore, we propose a model Topic-aware Attention and powerful Dual-encoder Interaction for Recall in news recommendation (TADI). To avoid irrelevant word distraction, TADI designs a Topic-aware Attention (TA) which weights words according to news topics. To enhance dual-encoder interaction, TADI provides a cheap yet powerful interaction module, namely Dual-encoder Interaction (DI). DI helps dual encoders interact powerfully based on two aux targets. After performance comparisons between TADI and state-of-the-arts in a series of experiments, we verify the effectiveness of TADI.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="jiang-2023-tadi">
<titleInfo>
<title>TADI: Topic-aware Attention and Powerful Dual-encoder Interaction for Recall in News Recommendation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Junxiang</namePart>
<namePart type="family">Jiang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Findings of the Association for Computational Linguistics: EMNLP 2023</title>
</titleInfo>
<name type="personal">
<namePart type="given">Houda</namePart>
<namePart type="family">Bouamor</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Juan</namePart>
<namePart type="family">Pino</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Kalika</namePart>
<namePart type="family">Bali</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Singapore</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>News recommendation, which aims to recommend news according to user interests, is one of the most widely commercialized applications of natural language processing research. News recall, which retrieves candidates from a very large news database, plays an important role in news recommendation. Recent research on news recall mostly adopts a dual-encoder architecture, as it provides a much faster recall scheme, and encodes each word equally. However, these works leave two challenges unresolved: irrelevant word distraction and weak dual-encoder interaction. Therefore, we propose TADI, a model with Topic-aware Attention and powerful Dual-encoder Interaction for Recall in news recommendation. To avoid irrelevant word distraction, TADI introduces a Topic-aware Attention (TA) module which weights words according to news topics. To enhance dual-encoder interaction, TADI provides a cheap yet powerful interaction module, namely Dual-encoder Interaction (DI), which helps the dual encoders interact based on two auxiliary targets. We verify the effectiveness of TADI through a series of experiments comparing it with state-of-the-art models.</abstract>
<identifier type="citekey">jiang-2023-tadi</identifier>
<identifier type="doi">10.18653/v1/2023.findings-emnlp.1047</identifier>
<location>
<url>https://aclanthology.org/2023.findings-emnlp.1047</url>
</location>
<part>
<date>2023-12</date>
<extent unit="page">
<start>15647</start>
<end>15658</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T TADI: Topic-aware Attention and Powerful Dual-encoder Interaction for Recall in News Recommendation
%A Jiang, Junxiang
%Y Bouamor, Houda
%Y Pino, Juan
%Y Bali, Kalika
%S Findings of the Association for Computational Linguistics: EMNLP 2023
%D 2023
%8 December
%I Association for Computational Linguistics
%C Singapore
%F jiang-2023-tadi
%X News recommendation, which aims to recommend news according to user interests, is one of the most widely commercialized applications of natural language processing research. News recall, which retrieves candidates from a very large news database, plays an important role in news recommendation. Recent research on news recall mostly adopts a dual-encoder architecture, as it provides a much faster recall scheme, and encodes each word equally. However, these works leave two challenges unresolved: irrelevant word distraction and weak dual-encoder interaction. Therefore, we propose TADI, a model with Topic-aware Attention and powerful Dual-encoder Interaction for Recall in news recommendation. To avoid irrelevant word distraction, TADI introduces a Topic-aware Attention (TA) module which weights words according to news topics. To enhance dual-encoder interaction, TADI provides a cheap yet powerful interaction module, namely Dual-encoder Interaction (DI), which helps the dual encoders interact based on two auxiliary targets. We verify the effectiveness of TADI through a series of experiments comparing it with state-of-the-art models.
%R 10.18653/v1/2023.findings-emnlp.1047
%U https://aclanthology.org/2023.findings-emnlp.1047
%U https://doi.org/10.18653/v1/2023.findings-emnlp.1047
%P 15647-15658
Markdown (Informal)
[TADI: Topic-aware Attention and Powerful Dual-encoder Interaction for Recall in News Recommendation](https://aclanthology.org/2023.findings-emnlp.1047) (Jiang, Findings 2023)
ACL
Junxiang Jiang. 2023. [TADI: Topic-aware Attention and Powerful Dual-encoder Interaction for Recall in News Recommendation](https://aclanthology.org/2023.findings-emnlp.1047). In *Findings of the Association for Computational Linguistics: EMNLP 2023*, pages 15647–15658, Singapore. Association for Computational Linguistics.
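
As a companion to the abstract above, the sketch below gives a rough, hypothetical picture of the two ideas it names: a topic-aware attention that re-weights word vectors using a news-topic embedding, and a two-tower (dual-encoder) recall model whose user and news representations are additionally tied together by an auxiliary similarity target. This is not the paper's implementation; all class names, dimensions, and the particular auxiliary signal are assumptions made purely for illustration.

```python
# Minimal sketch (not the authors' code) of topic-aware attention and a
# dual-encoder recall model with an auxiliary interaction target.
import torch
import torch.nn as nn
import torch.nn.functional as F


class TopicAwareAttention(nn.Module):
    """Pool word vectors, weighting each word by its relevance to a topic embedding."""

    def __init__(self, dim: int):
        super().__init__()
        self.query = nn.Linear(dim, dim)  # projects the topic vector into a query

    def forward(self, words: torch.Tensor, topic: torch.Tensor) -> torch.Tensor:
        # words: (batch, seq_len, dim), topic: (batch, dim)
        q = self.query(topic).unsqueeze(1)                     # (batch, 1, dim)
        scores = (q * words).sum(-1) / words.size(-1) ** 0.5   # (batch, seq_len)
        weights = F.softmax(scores, dim=-1)                    # attention over words
        return (weights.unsqueeze(-1) * words).sum(1)          # (batch, dim) pooled news vector


class DualEncoderRecall(nn.Module):
    """Two-tower recall model: separate user and news encoders, dot-product scoring."""

    def __init__(self, vocab: int, topics: int, dim: int = 64):
        super().__init__()
        self.word_emb = nn.Embedding(vocab, dim)
        self.topic_emb = nn.Embedding(topics, dim)
        self.news_attn = TopicAwareAttention(dim)
        self.user_proj = nn.Linear(dim, dim)  # stand-in for a full user encoder

    def encode_news(self, word_ids: torch.Tensor, topic_ids: torch.Tensor) -> torch.Tensor:
        return self.news_attn(self.word_emb(word_ids), self.topic_emb(topic_ids))

    def encode_user(self, clicked_news_vec: torch.Tensor) -> torch.Tensor:
        return self.user_proj(clicked_news_vec)

    def forward(self, word_ids, topic_ids, clicked_news_vec):
        news = self.encode_news(word_ids, topic_ids)
        user = self.encode_user(clicked_news_vec)
        main = (user * news).sum(-1)  # main recall score (dot product between towers)
        # Illustrative auxiliary interaction signal: encourage the two towers'
        # representations to stay aligned so the encoders do not drift apart.
        aux = F.cosine_similarity(user, news, dim=-1)
        return main, aux


if __name__ == "__main__":
    model = DualEncoderRecall(vocab=1000, topics=20)
    word_ids = torch.randint(0, 1000, (2, 16))   # two news items, 16 tokens each
    topic_ids = torch.randint(0, 20, (2,))
    user_hist = torch.randn(2, 64)               # stand-in for an encoded click history
    main_score, aux_score = model(word_ids, topic_ids, user_hist)
    print(main_score.shape, aux_score.shape)     # torch.Size([2]) torch.Size([2])
```

Because the final recall score is still a plain dot product between the two towers, candidate news vectors can be pre-computed offline and searched with approximate nearest-neighbour indexes, which is the speed advantage of dual-encoder recall that the abstract refers to.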