@InProceedings{huang-zhao-ma:2017:EMNLP2017,
  author    = {Huang, Liang  and  Zhao, Kai  and  Ma, Mingbo},
  title     = {When to Finish? Optimal Beam Search for Neural Text Generation (modulo beam size)},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {2134--2139},
  abstract  = {In neural text generation such as neural machine translation,
	summarization, and image captioning, beam search is widely used to
	improve the output text quality. However, in the neural generation
	setting, hypotheses can finish in different steps, which makes it
	difficult to decide when to end beam search to ensure optimality. We
	propose a provably optimal beam search algorithm that will always
	return the optimal-score complete hypothesis (modulo beam size), and
	finish as soon as the optimality is established. To counter neural
	generation's tendency for shorter hypotheses, we also introduce a
	bounded length reward mechanism which allows a modified version of our
	beam search algorithm to remain optimal. Experiments on neural machine
	translation demonstrate that our principled beam search algorithm
	leads to improvement in BLEU score over previously proposed
	alternatives.},
  doi       = {10.18653/v1/D17-1227},
  url       = {https://www.aclweb.org/anthology/D17-1227}
}

