@inproceedings{lampouras-vlachos:2016:COLING,
  author    = {Lampouras, Gerasimos and Vlachos, Andreas},
  title     = {Imitation Learning for Language Generation from Unaligned Data},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {1101--1112},
  abstract  = {Natural language generation (NLG) is the task of generating natural language
               from a meaning representation. Current rule-based approaches require
               domain-specific and manually constructed linguistic resources, while most
               machine-learning based approaches rely on aligned training data and/or phrase
               templates. The latter are needed to restrict the search space for the
               structured prediction task defined by the unaligned datasets. In this work we
               propose the use of imitation learning for structured prediction which learns an
               incremental model that handles the large search space by avoiding explicit
               enumeration of the outputs. We focus on the Locally Optimal Learning to Search
               framework which allows us to train against non-decomposable loss functions such
               as the BLEU or ROUGE scores while not assuming gold standard alignments. We
               evaluate our approach on three datasets using both automatic measures and human
               judgements and achieve results comparable to the state-of-the-art approaches
               developed for each of them.},
  url       = {http://aclweb.org/anthology/C16-1105},
}

