@inproceedings{gehrmann-etal-2018-end,
title = "End-to-End Content and Plan Selection for Data-to-Text Generation",
author = "Gehrmann, Sebastian and
Dai, Falcon and
Elder, Henry and
Rush, Alexander",
editor = "Krahmer, Emiel and
Gatt, Albert and
Goudbeek, Martijn",
booktitle = "Proceedings of the 11th International Conference on Natural Language Generation",
month = nov,
year = "2018",
address = "Tilburg University, The Netherlands",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W18-6505",
doi = "10.18653/v1/W18-6505",
pages = "46--56",
abstract = "Learning to generate fluent natural language from structured data with neural networks has become an common approach for NLG. This problem can be challenging when the form of the structured data varies between examples. This paper presents a survey of several extensions to sequence-to-sequence models to account for the latent content selection process, particularly variants of copy attention and coverage decoding. We further propose a training method based on diverse ensembling to encourage models to learn distinct sentence templates during training. An empirical evaluation of these techniques shows an increase in the quality of generated text across five automated metrics, as well as human evaluation.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="gehrmann-etal-2018-end">
<titleInfo>
<title>End-to-End Content and Plan Selection for Data-to-Text Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sebastian</namePart>
<namePart type="family">Gehrmann</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Falcon</namePart>
<namePart type="family">Dai</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Henry</namePart>
<namePart type="family">Elder</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexander</namePart>
<namePart type="family">Rush</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 11th International Conference on Natural Language Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Emiel</namePart>
<namePart type="family">Krahmer</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Albert</namePart>
<namePart type="family">Gatt</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Martijn</namePart>
<namePart type="family">Goudbeek</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Tilburg University, The Netherlands</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Learning to generate fluent natural language from structured data with neural networks has become a common approach for NLG. This problem can be challenging when the form of the structured data varies between examples. This paper presents a survey of several extensions to sequence-to-sequence models to account for the latent content selection process, particularly variants of copy attention and coverage decoding. We further propose a training method based on diverse ensembling to encourage models to learn distinct sentence templates during training. An empirical evaluation of these techniques shows an increase in the quality of generated text across five automated metrics, as well as human evaluation.</abstract>
<identifier type="citekey">gehrmann-etal-2018-end</identifier>
<identifier type="doi">10.18653/v1/W18-6505</identifier>
<location>
<url>https://aclanthology.org/W18-6505</url>
</location>
<part>
<date>2018-11</date>
<extent unit="page">
<start>46</start>
<end>56</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T End-to-End Content and Plan Selection for Data-to-Text Generation
%A Gehrmann, Sebastian
%A Dai, Falcon
%A Elder, Henry
%A Rush, Alexander
%Y Krahmer, Emiel
%Y Gatt, Albert
%Y Goudbeek, Martijn
%S Proceedings of the 11th International Conference on Natural Language Generation
%D 2018
%8 November
%I Association for Computational Linguistics
%C Tilburg University, The Netherlands
%F gehrmann-etal-2018-end
%X Learning to generate fluent natural language from structured data with neural networks has become a common approach for NLG. This problem can be challenging when the form of the structured data varies between examples. This paper presents a survey of several extensions to sequence-to-sequence models to account for the latent content selection process, particularly variants of copy attention and coverage decoding. We further propose a training method based on diverse ensembling to encourage models to learn distinct sentence templates during training. An empirical evaluation of these techniques shows an increase in the quality of generated text across five automated metrics, as well as human evaluation.
%R 10.18653/v1/W18-6505
%U https://aclanthology.org/W18-6505
%U https://doi.org/10.18653/v1/W18-6505
%P 46-56
Markdown (Informal)
[End-to-End Content and Plan Selection for Data-to-Text Generation](https://aclanthology.org/W18-6505) (Gehrmann et al., INLG 2018)
ACL
Sebastian Gehrmann, Falcon Dai, Henry Elder, and Alexander Rush. 2018. End-to-End Content and Plan Selection for Data-to-Text Generation. In Proceedings of the 11th International Conference on Natural Language Generation, pages 46–56, Tilburg University, The Netherlands. Association for Computational Linguistics.