@inproceedings{zhang-etal-2023-turn,
title = "Turn-Level Active Learning for Dialogue State Tracking",
author = "Zhang, Zihan and
Fang, Meng and
Ye, Fanghua and
Chen, Ling and
Namazi-Rad, Mohammad-Reza",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2023.emnlp-main.478",
doi = "10.18653/v1/2023.emnlp-main.478",
pages = "7705--7719",
abstract = "Dialogue state tracking (DST) plays an important role in task-oriented dialogue systems. However, collecting a large amount of turn-by-turn annotated dialogue data is costly and inefficient. In this paper, we propose a novel turn-level active learning framework for DST to actively select turns in dialogues to annotate. Given the limited labelling budget, experimental results demonstrate the effectiveness of selective annotation of dialogue turns. Additionally, our approach can effectively achieve comparable DST performance to traditional training approaches with significantly less annotated data, which provides a more efficient way to annotate new dialogue data.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="zhang-etal-2023-turn">
    <titleInfo>
      <title>Turn-Level Active Learning for Dialogue State Tracking</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Zihan</namePart>
      <namePart type="family">Zhang</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Meng</namePart>
      <namePart type="family">Fang</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Fanghua</namePart>
      <namePart type="family">Ye</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Ling</namePart>
      <namePart type="family">Chen</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Mohammad-Reza</namePart>
      <namePart type="family">Namazi-Rad</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2023-12</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Houda</namePart>
        <namePart type="family">Bouamor</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Juan</namePart>
        <namePart type="family">Pino</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Kalika</namePart>
        <namePart type="family">Bali</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Singapore</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Dialogue state tracking (DST) plays an important role in task-oriented dialogue systems. However, collecting a large amount of turn-by-turn annotated dialogue data is costly and inefficient. In this paper, we propose a novel turn-level active learning framework for DST to actively select turns in dialogues to annotate. Given the limited labelling budget, experimental results demonstrate the effectiveness of selective annotation of dialogue turns. Additionally, our approach can effectively achieve comparable DST performance to traditional training approaches with significantly less annotated data, which provides a more efficient way to annotate new dialogue data.</abstract>
    <identifier type="citekey">zhang-etal-2023-turn</identifier>
    <identifier type="doi">10.18653/v1/2023.emnlp-main.478</identifier>
    <location>
      <url>https://aclanthology.org/2023.emnlp-main.478</url>
    </location>
    <part>
      <date>2023-12</date>
      <extent unit="page">
        <start>7705</start>
        <end>7719</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Turn-Level Active Learning for Dialogue State Tracking
%A Zhang, Zihan
%A Fang, Meng
%A Ye, Fanghua
%A Chen, Ling
%A Namazi-Rad, Mohammad-Reza
%Y Bouamor, Houda
%Y Pino, Juan
%Y Bali, Kalika
%S Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing
%D 2023
%8 December
%I Association for Computational Linguistics
%C Singapore
%F zhang-etal-2023-turn
%X Dialogue state tracking (DST) plays an important role in task-oriented dialogue systems. However, collecting a large amount of turn-by-turn annotated dialogue data is costly and inefficient. In this paper, we propose a novel turn-level active learning framework for DST to actively select turns in dialogues to annotate. Given the limited labelling budget, experimental results demonstrate the effectiveness of selective annotation of dialogue turns. Additionally, our approach can effectively achieve comparable DST performance to traditional training approaches with significantly less annotated data, which provides a more efficient way to annotate new dialogue data.
%R 10.18653/v1/2023.emnlp-main.478
%U https://aclanthology.org/2023.emnlp-main.478
%U https://doi.org/10.18653/v1/2023.emnlp-main.478
%P 7705-7719
Markdown (Informal)
[Turn-Level Active Learning for Dialogue State Tracking](https://aclanthology.org/2023.emnlp-main.478) (Zhang et al., EMNLP 2023)
ACL
Zihan Zhang, Meng Fang, Fanghua Ye, Ling Chen, and Mohammad-Reza Namazi-Rad. 2023. Turn-Level Active Learning for Dialogue State Tracking. In Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing, pages 7705–7719, Singapore. Association for Computational Linguistics.