@inproceedings{xiao-EtAl:2018:C18-1,
  author    = {Xiao, Liqiang  and  Zhang, Honglun  and  Chen, Wenqing  and  Wang, Yongkun  and  Jin, Yaohui},
  title     = {Learning What to Share: Leaky Multi-Task Network for Text Classification},
  booktitle = {Proceedings of the 27th International Conference on Computational Linguistics},
  month     = aug,
  year      = {2018},
  address   = {Santa Fe, New Mexico, USA},
  publisher = {Association for Computational Linguistics},
  pages     = {2055--2065},
  abstract  = {Neural network based multi-task learning has achieved great success on many NLP problems, which focuses on sharing knowledge among tasks by linking some layers to enhance the performance. However, most existing approaches suffer from the interference between tasks because they lack of selection mechanism for feature sharing. In this way, the feature spaces of tasks may be easily contaminated by helpless features borrowed from others, which will confuse the models for making correct prediction. In this paper, we propose a multi-task convolutional neural network with the Leaky Unit, which has memory and forgetting mechanism to filter the feature flows between tasks. Experiments on five different datasets for text classification validate the benefits of our approach.},
  url       = {http://www.aclweb.org/anthology/C18-1175},
}

