@InProceedings{manome-EtAl:2018:W18-65,
  author    = {Manome, Kana  and  Yoshikawa, Masashi  and  Yanaka, Hitomi  and  Martínez-Gómez, Pascual  and  Mineshima, Koji  and  Bekki, Daisuke},
  title     = {Neural sentence generation from formal semantics},
  booktitle = {Proceedings of the 11th International Conference on Natural Language Generation},
  month     = sep,
  year      = {2018},
  address   = {Tilburg University, The Netherlands},
  publisher = {Association for Computational Linguistics},
  pages     = {408--414},
  abstract  = {Sequence-to-sequence models have shown strong performance in a wide range of NLP tasks, yet their applications to sentence generation from logical representations are underdeveloped. In this paper, we present a first sequence-to-sequence model for generating sentences from logical semantic representations based on event semantics. We use a semantic parsing system based on Combinatory Categorial Grammar (CCG) to obtain data annotated with logical formulas. We augment our sequence-to-sequence model with masking for predicates to contain output sentences. We also propose a novel evaluation method for generation using Recognizing Textual Entailment (RTE): combining parsing and generation, we test whether or not the output sentence entails the original text and vice versa. The experiments showed that our model outperformed a baseline with respect to both BLEU scores and accuracies in RTE.},
  url       = {http://www.aclweb.org/anthology/W18-6549}
}

