@inproceedings{feng-shao-2022-non,
title = "Non-Autoregressive Models for Fast Sequence Generation",
author = "Feng, Yang and
Shao, Chenze",
editor = "El-Beltagy, Samhaa R. and
Qiu, Xipeng",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts",
month = dec,
year = "2022",
address = "Abu Dubai, UAE",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.emnlp-tutorials.6",
doi = "10.18653/v1/2022.emnlp-tutorials.6",
pages = "30--35",
abstract = "Autoregressive (AR) models have achieved great success in various sequence generation tasks. However, AR models can only generate target sequence word-by-word due to the AR mechanism and hence suffer from slow inference. Recently, non-autoregressive (NAR) models, which generate all the tokens in parallel by removing the sequential dependencies within the target sequence, have received increasing attention in sequence generation tasks such as neural machine translation (NMT), automatic speech recognition (ASR), and text to speech (TTS). In this tutorial, we will provide a comprehensive introduction to non-autoregressive sequence generation.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="feng-shao-2022-non">
<titleInfo>
<title>Non-Autoregressive Models for Fast Sequence Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Feng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Chenze</namePart>
<namePart type="family">Shao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts</title>
</titleInfo>
<name type="personal">
<namePart type="given">Samhaa</namePart>
<namePart type="given">R</namePart>
<namePart type="family">El-Beltagy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xipeng</namePart>
<namePart type="family">Qiu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Abu Dhabi, UAE</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Autoregressive (AR) models have achieved great success in various sequence generation tasks. However, AR models can only generate the target sequence word by word due to the AR mechanism and hence suffer from slow inference. Recently, non-autoregressive (NAR) models, which generate all the tokens in parallel by removing the sequential dependencies within the target sequence, have received increasing attention in sequence generation tasks such as neural machine translation (NMT), automatic speech recognition (ASR), and text-to-speech (TTS). In this tutorial, we will provide a comprehensive introduction to non-autoregressive sequence generation.</abstract>
<identifier type="citekey">feng-shao-2022-non</identifier>
<identifier type="doi">10.18653/v1/2022.emnlp-tutorials.6</identifier>
<location>
<url>https://aclanthology.org/2022.emnlp-tutorials.6</url>
</location>
<part>
<date>2022-12</date>
<extent unit="page">
<start>30</start>
<end>35</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Non-Autoregressive Models for Fast Sequence Generation
%A Feng, Yang
%A Shao, Chenze
%Y El-Beltagy, Samhaa R.
%Y Qiu, Xipeng
%S Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts
%D 2022
%8 December
%I Association for Computational Linguistics
%C Abu Dhabi, UAE
%F feng-shao-2022-non
%X Autoregressive (AR) models have achieved great success in various sequence generation tasks. However, AR models can only generate the target sequence word by word due to the AR mechanism and hence suffer from slow inference. Recently, non-autoregressive (NAR) models, which generate all the tokens in parallel by removing the sequential dependencies within the target sequence, have received increasing attention in sequence generation tasks such as neural machine translation (NMT), automatic speech recognition (ASR), and text-to-speech (TTS). In this tutorial, we will provide a comprehensive introduction to non-autoregressive sequence generation.
%R 10.18653/v1/2022.emnlp-tutorials.6
%U https://aclanthology.org/2022.emnlp-tutorials.6
%U https://doi.org/10.18653/v1/2022.emnlp-tutorials.6
%P 30-35
Markdown (Informal)
[Non-Autoregressive Models for Fast Sequence Generation](https://aclanthology.org/2022.emnlp-tutorials.6) (Feng & Shao, EMNLP 2022)
ACL
Yang Feng and Chenze Shao. 2022. Non-Autoregressive Models for Fast Sequence Generation. In Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts, pages 30–35, Abu Dhabi, UAE. Association for Computational Linguistics.
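
The abstract's core contrast, word-by-word AR decoding versus single-pass NAR decoding, can be illustrated with a small toy sketch. Everything here (the ToyModel class and its predict_* methods) is a hypothetical stand-in to make the control-flow difference runnable, not code from the tutorial:

import typing

class ToyModel:
    """Illustrative stand-in for a sequence model; not from the tutorial."""
    vocab = ["the", "cat", "sat", "down", "<eos>"]

    def predict_next(self, source: str, prefix: list) -> str:
        # AR step: a real model would condition on `source` and `prefix`;
        # here the prefix length simply indexes the next token.
        return self.vocab[len(prefix)]

    def predict_length(self, source: str) -> int:
        # NAR models typically predict the target length up front.
        return 4

    def predict_all(self, source: str, length: int) -> list:
        # NAR step: every position is filled independently, in one pass.
        return [self.vocab[i] for i in range(length)]

def autoregressive_decode(model: ToyModel, source: str, max_len: int = 10) -> list:
    """Sequential: token t depends on tokens 0..t-1, so decoding costs
    one model call per output token (the source of AR inference latency)."""
    target: list = []
    for _ in range(max_len):
        tok = model.predict_next(source, target)
        if tok == "<eos>":
            break
        target.append(tok)
    return target

def non_autoregressive_decode(model: ToyModel, source: str) -> list:
    """Parallel: predict a length, then fill all positions at once,
    removing the sequential dependency within the target sequence."""
    length = model.predict_length(source)
    return model.predict_all(source, length)

m = ToyModel()
assert autoregressive_decode(m, "src") == non_autoregressive_decode(m, "src") == ["the", "cat", "sat", "down"]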