@inproceedings{shen-etal-2019-controlling,
title = "Controlling Sequence-to-Sequence Models - A Demonstration on Neural-based Acrostic Generator",
author = "Shen, Liang-Hsin and
Tai, Pei-Lun and
Wu, Chao-Chung and
Lin, Shou-De",
editor = "Pad{\'o}, Sebastian and
Huang, Ruihong",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP): System Demonstrations",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D19-3008",
doi = "10.18653/v1/D19-3008",
pages = "43--48",
abstract = "An acrostic is a form of writing that the first token of each line (or other recurring features in the text) forms a meaningful sequence. In this paper we present a generalized acrostic generation system that can hide certain message in a flexible pattern specified by the users. Different from previous works that focus on rule-based solutions, here we adopt a neural-based sequence-to-sequence model to achieve this goal. Besides acrostic, users are also allowed to specify the rhyme and length of the output sequences. Based on our knowledge, this is the first neural-based natural language generation system that demonstrates the capability of performing micro-level control over output sentences.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="shen-etal-2019-controlling">
<titleInfo>
<title>Controlling Sequence-to-Sequence Models - A Demonstration on Neural-based Acrostic Generator</title>
</titleInfo>
<name type="personal">
<namePart type="given">Liang-Hsin</namePart>
<namePart type="family">Shen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pei-Lun</namePart>
<namePart type="family">Tai</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Chao-Chung</namePart>
<namePart type="family">Wu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shou-De</namePart>
<namePart type="family">Lin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP): System Demonstrations</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sebastian</namePart>
<namePart type="family">Padó</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ruihong</namePart>
<namePart type="family">Huang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Hong Kong, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>An acrostic is a form of writing that the first token of each line (or other recurring features in the text) forms a meaningful sequence. In this paper we present a generalized acrostic generation system that can hide certain message in a flexible pattern specified by the users. Different from previous works that focus on rule-based solutions, here we adopt a neural-based sequence-to-sequence model to achieve this goal. Besides acrostic, users are also allowed to specify the rhyme and length of the output sequences. Based on our knowledge, this is the first neural-based natural language generation system that demonstrates the capability of performing micro-level control over output sentences.</abstract>
<identifier type="citekey">shen-etal-2019-controlling</identifier>
<identifier type="doi">10.18653/v1/D19-3008</identifier>
<location>
<url>https://aclanthology.org/D19-3008</url>
</location>
<part>
<date>2019-11</date>
<extent unit="page">
<start>43</start>
<end>48</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Controlling Sequence-to-Sequence Models - A Demonstration on Neural-based Acrostic Generator
%A Shen, Liang-Hsin
%A Tai, Pei-Lun
%A Wu, Chao-Chung
%A Lin, Shou-De
%Y Padó, Sebastian
%Y Huang, Ruihong
%S Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP): System Demonstrations
%D 2019
%8 November
%I Association for Computational Linguistics
%C Hong Kong, China
%F shen-etal-2019-controlling
%X An acrostic is a form of writing that the first token of each line (or other recurring features in the text) forms a meaningful sequence. In this paper we present a generalized acrostic generation system that can hide certain message in a flexible pattern specified by the users. Different from previous works that focus on rule-based solutions, here we adopt a neural-based sequence-to-sequence model to achieve this goal. Besides acrostic, users are also allowed to specify the rhyme and length of the output sequences. Based on our knowledge, this is the first neural-based natural language generation system that demonstrates the capability of performing micro-level control over output sentences.
%R 10.18653/v1/D19-3008
%U https://aclanthology.org/D19-3008
%U https://doi.org/10.18653/v1/D19-3008
%P 43-48
Markdown (Informal)
[Controlling Sequence-to-Sequence Models - A Demonstration on Neural-based Acrostic Generator](https://aclanthology.org/D19-3008) (Shen et al., EMNLP-IJCNLP 2019)
ACL