@inproceedings{jagatap-etal-2025-reinforcement,
title = "Reinforcement Learning for Adversarial Query Generation to Enhance Relevance in Cold-Start Product Search",
author = "Jagatap, Akshay and
Anand, Neeraj and
Singh, Sonali and
Comar, Prakash Mandayam",
editor = "Rehm, Georg and
Li, Yunyao",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 6: Industry Track)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.acl-industry.91/",
doi = "10.18653/v1/2025.acl-industry.91",
pages = "1300--1307",
ISBN = "979-8-89176-288-6",
abstract = "Accurate mapping of queries to product categories is crucial for efficient retrieval and ranking of relevant products in e-commerce search. Conventionally, such query classification models rely on supervised learning using historical user interactions, but their effectiveness diminishes in cold-start scenarios, where new categories or products lack sufficient training data. This results in poor query-to-category mappings, negatively affecting retrieval and ranking. Synthetic query generation has emerged as a promising solution by augmenting training data; however, existing methods do not incorporate feedback from the query relevance model, limiting their ability to generate queries that enhance product retrieval. To address this, we propose an adversarial reinforcement learning framework that optimizes an LLM-based generator to expose weaknesses in query classification models. The generator produces synthetic queries to augment the classifier{'}s training set, ultimately improving its performance. Additionally, we introduce a structured reward signal to ensure stable training. Experiments on public datasets show an average PR-AUC improvement of +1.82{\%} on benchmarks and +3.26{\%} on a proprietary dataset, demonstrating the framework{'}s effectiveness in enhancing query classification and mitigating cold-start challenges."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="jagatap-etal-2025-reinforcement">
<titleInfo>
<title>Reinforcement Learning for Adversarial Query Generation to Enhance Relevance in Cold-Start Product Search</title>
</titleInfo>
<name type="personal">
<namePart type="given">Akshay</namePart>
<namePart type="family">Jagatap</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Neeraj</namePart>
<namePart type="family">Anand</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sonali</namePart>
<namePart type="family">Singh</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Prakash</namePart>
<namePart type="given">Mandayam</namePart>
<namePart type="family">Comar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2025-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 6: Industry Track)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Georg</namePart>
<namePart type="family">Rehm</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yunyao</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Vienna, Austria</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-288-6</identifier>
</relatedItem>
<abstract>Accurate mapping of queries to product categories is crucial for efficient retrieval and ranking of relevant products in e-commerce search. Conventionally, such query classification models rely on supervised learning using historical user interactions, but their effectiveness diminishes in cold-start scenarios, where new categories or products lack sufficient training data. This results in poor query-to-category mappings, negatively affecting retrieval and ranking. Synthetic query generation has emerged as a promising solution by augmenting training data; however, existing methods do not incorporate feedback from the query relevance model, limiting their ability to generate queries that enhance product retrieval. To address this, we propose an adversarial reinforcement learning framework that optimizes an LLM-based generator to expose weaknesses in query classification models. The generator produces synthetic queries to augment the classifier’s training set, ultimately improving its performance. Additionally, we introduce a structured reward signal to ensure stable training. Experiments on public datasets show an average PR-AUC improvement of +1.82% on benchmarks and +3.26% on a proprietary dataset, demonstrating the framework’s effectiveness in enhancing query classification and mitigating cold-start challenges.</abstract>
<identifier type="citekey">jagatap-etal-2025-reinforcement</identifier>
<identifier type="doi">10.18653/v1/2025.acl-industry.91</identifier>
<location>
<url>https://aclanthology.org/2025.acl-industry.91/</url>
</location>
<part>
<date>2025-07</date>
<extent unit="page">
<start>1300</start>
<end>1307</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Reinforcement Learning for Adversarial Query Generation to Enhance Relevance in Cold-Start Product Search
%A Jagatap, Akshay
%A Anand, Neeraj
%A Singh, Sonali
%A Comar, Prakash Mandayam
%Y Rehm, Georg
%Y Li, Yunyao
%S Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 6: Industry Track)
%D 2025
%8 July
%I Association for Computational Linguistics
%C Vienna, Austria
%@ 979-8-89176-288-6
%F jagatap-etal-2025-reinforcement
%X Accurate mapping of queries to product categories is crucial for efficient retrieval and ranking of relevant products in e-commerce search. Conventionally, such query classification models rely on supervised learning using historical user interactions, but their effectiveness diminishes in cold-start scenarios, where new categories or products lack sufficient training data. This results in poor query-to-category mappings, negatively affecting retrieval and ranking. Synthetic query generation has emerged as a promising solution by augmenting training data; however, existing methods do not incorporate feedback from the query relevance model, limiting their ability to generate queries that enhance product retrieval. To address this, we propose an adversarial reinforcement learning framework that optimizes an LLM-based generator to expose weaknesses in query classification models. The generator produces synthetic queries to augment the classifier’s training set, ultimately improving its performance. Additionally, we introduce a structured reward signal to ensure stable training. Experiments on public datasets show an average PR-AUC improvement of +1.82% on benchmarks and +3.26% on a proprietary dataset, demonstrating the framework’s effectiveness in enhancing query classification and mitigating cold-start challenges.
%R 10.18653/v1/2025.acl-industry.91
%U https://aclanthology.org/2025.acl-industry.91/
%U https://doi.org/10.18653/v1/2025.acl-industry.91
%P 1300-1307
Markdown (Informal)
[Reinforcement Learning for Adversarial Query Generation to Enhance Relevance in Cold-Start Product Search](https://aclanthology.org/2025.acl-industry.91/) (Jagatap et al., ACL 2025)