@inproceedings{puduppully-zhang-shrivastava:2017:EACLlong,
  author    = {Puduppully, Ratish and Zhang, Yue and Shrivastava, Manish},
  title     = {Transition-Based Deep Input Linearization},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {643--654},
  abstract  = {Traditional methods for deep NLG adopt pipeline approaches comprising stages
	such as constructing syntactic input, predicting function words, linearizing
	the syntactic input and generating the surface forms. Though easier to
	visualize, pipeline approaches suffer from error propagation. In addition,
	information available across modules cannot be leveraged by all modules. We
	construct a transition-based model to jointly perform linearization, function
	word prediction and morphological generation, which considerably improves upon
	the accuracy compared to a pipelined baseline system. On a standard deep input
	linearization shared task, our system achieves the best results reported so
	far.},
  url       = {http://www.aclweb.org/anthology/E17-1061},
}

