@inproceedings{yin-neubig:2017:Long,
  author    = {Yin, Pengcheng and Neubig, Graham},
  title     = {A Syntactic Neural Model for General-Purpose Code Generation},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {440--450},
  doi       = {10.18653/v1/P17-1041},
  url       = {http://aclweb.org/anthology/P17-1041},
  abstract  = {We consider the problem of parsing natural language descriptions into source
    code written in a general-purpose programming language like Python. Existing
    data-driven methods treat this problem as a language generation task without
    considering the underlying syntax of the target programming language. Informed
    by previous work in semantic parsing, in this paper we propose a novel neural
    architecture powered by a grammar model to explicitly capture the target syntax
    as prior knowledge. Experiments find this an effective way to scale up to
    generation of complex programs from natural language descriptions, achieving
    state-of-the-art results that well outperform previous code generation and
    semantic parsing approaches.},
}

