@inproceedings{ji-etal-2020-amazing,
    title = "The Amazing World of Neural Language Generation",
    author = "Ji, Yangfeng  and
      Bosselut, Antoine  and
      Wolf, Thomas  and
      Celikyilmaz, Asli",
    editor = "Villavicencio, Aline  and
      Van Durme, Benjamin",
    booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.emnlp-tutorials.7",
    doi = "10.18653/v1/2020.emnlp-tutorials.7",
    pages = "37--42",
    abstract = "Neural Language Generation (NLG) {--} using neural network models to generate coherent text {--} is among the most promising methods for automated text creation. Recent years have seen a paradigm shift in neural text generation, caused by the advances in deep contextual language modeling (e.g., LSTMs, GPT, GPT2) and transfer learning (e.g., ELMo, BERT). While these tools have dramatically improved the state of NLG, particularly for low-resource tasks, state-of-the-art NLG models still face many challenges: a lack of diversity in generated text, commonsense violations in depicted situations, difficulties in making use of factual information, and difficulties in designing reliable evaluation metrics. In this tutorial, we will present an overview of the current state-of-the-art in neural network architectures, and how they shaped recent research directions in text generation. We will discuss how and why these models succeed/fail at generating coherent text, and provide insights on several applications.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="ji-etal-2020-amazing">
    <titleInfo>
      <title>The Amazing World of Neural Language Generation</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Yangfeng</namePart>
      <namePart type="family">Ji</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Antoine</namePart>
      <namePart type="family">Bosselut</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Thomas</namePart>
      <namePart type="family">Wolf</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Asli</namePart>
      <namePart type="family">Celikyilmaz</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Aline</namePart>
        <namePart type="family">Villavicencio</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Benjamin</namePart>
        <namePart type="family">Van Durme</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Neural Language Generation (NLG) – using neural network models to generate coherent text – is among the most promising methods for automated text creation. Recent years have seen a paradigm shift in neural text generation, caused by the advances in deep contextual language modeling (e.g., LSTMs, GPT, GPT2) and transfer learning (e.g., ELMo, BERT). While these tools have dramatically improved the state of NLG, particularly for low-resource tasks, state-of-the-art NLG models still face many challenges: a lack of diversity in generated text, commonsense violations in depicted situations, difficulties in making use of factual information, and difficulties in designing reliable evaluation metrics. In this tutorial, we will present an overview of the current state-of-the-art in neural network architectures, and how they shaped recent research directions in text generation. We will discuss how and why these models succeed/fail at generating coherent text, and provide insights on several applications.</abstract>
    <identifier type="citekey">ji-etal-2020-amazing</identifier>
    <identifier type="doi">10.18653/v1/2020.emnlp-tutorials.7</identifier>
    <location>
      <url>https://aclanthology.org/2020.emnlp-tutorials.7</url>
    </location>
    <part>
      <date>2020-11</date>
      <extent unit="page">
        <start>37</start>
        <end>42</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T The Amazing World of Neural Language Generation
%A Ji, Yangfeng
%A Bosselut, Antoine
%A Wolf, Thomas
%A Celikyilmaz, Asli
%Y Villavicencio, Aline
%Y Van Durme, Benjamin
%S Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts
%D 2020
%8 November
%I Association for Computational Linguistics
%C Online
%F ji-etal-2020-amazing
%X Neural Language Generation (NLG) – using neural network models to generate coherent text – is among the most promising methods for automated text creation. Recent years have seen a paradigm shift in neural text generation, caused by the advances in deep contextual language modeling (e.g., LSTMs, GPT, GPT2) and transfer learning (e.g., ELMo, BERT). While these tools have dramatically improved the state of NLG, particularly for low-resource tasks, state-of-the-art NLG models still face many challenges: a lack of diversity in generated text, commonsense violations in depicted situations, difficulties in making use of factual information, and difficulties in designing reliable evaluation metrics. In this tutorial, we will present an overview of the current state-of-the-art in neural network architectures, and how they shaped recent research directions in text generation. We will discuss how and why these models succeed/fail at generating coherent text, and provide insights on several applications.
%R 10.18653/v1/2020.emnlp-tutorials.7
%U https://aclanthology.org/2020.emnlp-tutorials.7
%U https://doi.org/10.18653/v1/2020.emnlp-tutorials.7
%P 37-42
Markdown (Informal)
[The Amazing World of Neural Language Generation](https://aclanthology.org/2020.emnlp-tutorials.7) (Ji et al., EMNLP 2020)

ACL
Yangfeng Ji, Antoine Bosselut, Thomas Wolf, and Asli Celikyilmaz. 2020. The Amazing World of Neural Language Generation. In Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: Tutorial Abstracts, pages 37–42, Online. Association for Computational Linguistics.