@inproceedings{castro-ferreira-etal-2017-linguistic,
    title = "Linguistic realisation as machine translation: Comparing different {MT} models for {AMR}-to-text generation",
    author = "Castro Ferreira, Thiago and
      Calixto, Iacer and
      Wubben, Sander and
      Krahmer, Emiel",
    editor = "Alonso, Jose M. and
      Bugar{\'\i}n, Alberto and
      Reiter, Ehud",
    booktitle = "Proceedings of the 10th International Conference on Natural Language Generation",
    month = sep,
    year = "2017",
    address = "Santiago de Compostela, Spain",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W17-3501",
    doi = "10.18653/v1/W17-3501",
    pages = "1--10",
    abstract = "In this paper, we study AMR-to-text generation, framing it as a translation task and comparing two different MT approaches (Phrase-based and Neural MT). We systematically study the effects of 3 AMR preprocessing steps (Delexicalisation, Compression, and Linearisation) applied before the MT phase. Our results show that preprocessing indeed helps, although the benefits differ for the two MT models.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="castro-ferreira-etal-2017-linguistic">
    <titleInfo>
      <title>Linguistic realisation as machine translation: Comparing different MT models for AMR-to-text generation</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Thiago</namePart>
      <namePart type="family">Castro Ferreira</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Iacer</namePart>
      <namePart type="family">Calixto</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Sander</namePart>
      <namePart type="family">Wubben</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Emiel</namePart>
      <namePart type="family">Krahmer</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2017-09</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 10th International Conference on Natural Language Generation</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Jose</namePart>
        <namePart type="given">M</namePart>
        <namePart type="family">Alonso</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Alberto</namePart>
        <namePart type="family">Bugarín</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Ehud</namePart>
        <namePart type="family">Reiter</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Santiago de Compostela, Spain</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>In this paper, we study AMR-to-text generation, framing it as a translation task and comparing two different MT approaches (Phrase-based and Neural MT). We systematically study the effects of 3 AMR preprocessing steps (Delexicalisation, Compression, and Linearisation) applied before the MT phase. Our results show that preprocessing indeed helps, although the benefits differ for the two MT models.</abstract>
    <identifier type="citekey">castro-ferreira-etal-2017-linguistic</identifier>
    <identifier type="doi">10.18653/v1/W17-3501</identifier>
    <location>
      <url>https://aclanthology.org/W17-3501</url>
    </location>
    <part>
      <date>2017-09</date>
      <extent unit="page">
        <start>1</start>
        <end>10</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Linguistic realisation as machine translation: Comparing different MT models for AMR-to-text generation
%A Castro Ferreira, Thiago
%A Calixto, Iacer
%A Wubben, Sander
%A Krahmer, Emiel
%Y Alonso, Jose M.
%Y Bugarín, Alberto
%Y Reiter, Ehud
%S Proceedings of the 10th International Conference on Natural Language Generation
%D 2017
%8 September
%I Association for Computational Linguistics
%C Santiago de Compostela, Spain
%F castro-ferreira-etal-2017-linguistic
%X In this paper, we study AMR-to-text generation, framing it as a translation task and comparing two different MT approaches (Phrase-based and Neural MT). We systematically study the effects of 3 AMR preprocessing steps (Delexicalisation, Compression, and Linearisation) applied before the MT phase. Our results show that preprocessing indeed helps, although the benefits differ for the two MT models.
%R 10.18653/v1/W17-3501
%U https://aclanthology.org/W17-3501
%U https://doi.org/10.18653/v1/W17-3501
%P 1-10
Markdown (Informal)
[Linguistic realisation as machine translation: Comparing different MT models for AMR-to-text generation](https://aclanthology.org/W17-3501) (Castro Ferreira et al., INLG 2017)
ACL
Thiago Castro Ferreira, Iacer Calixto, Sander Wubben, and Emiel Krahmer. 2017. [Linguistic realisation as machine translation: Comparing different MT models for AMR-to-text generation](https://aclanthology.org/W17-3501). In *Proceedings of the 10th International Conference on Natural Language Generation*, pages 1–10, Santiago de Compostela, Spain. Association for Computational Linguistics.