@inproceedings{liang-shu:2017:I17-2,
  author    = {Liang, Davis and Shu, Yan},
  title     = {Deep Automated Multi-task Learning},
  booktitle = {Proceedings of the Eighth International Joint Conference on Natural Language Processing (Volume 2: Short Papers)},
  month     = nov,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {55--60},
  abstract  = {Multi-task learning (MTL) has recently contributed to learning better
    representations in service of various NLP tasks. MTL aims at improving the
    performance of a primary task by jointly training on a secondary task. This
    paper introduces automated tasks, which exploit the sequential nature of the
    input data, as secondary tasks in an MTL model. We explore next word
    prediction, next character prediction, and missing word completion as potential
    automated tasks. Our results show that training on a primary task in parallel
    with a secondary automated task improves both the convergence speed and
    accuracy for the primary task. We suggest two methods for augmenting an
    existing network with automated tasks and establish better performance in topic
    prediction, sentiment analysis, and hashtag recommendation. Finally, we show
    that the MTL models can perform well on datasets that are small and colloquial
    by nature.},
  url       = {http://www.aclweb.org/anthology/I17-2010},
}

