@inproceedings{liu-EtAl:2018:C18-12,
  author    = {Liu, Qian  and  Huang, Heyan  and  Gao, Yang  and  Wei, Xiaochi  and  Tian, Yuxin  and  Liu, Luyang},
  title     = {Task-oriented Word Embedding for Text Classification},
  booktitle = {Proceedings of the 27th International Conference on Computational Linguistics},
  month     = aug,
  year      = {2018},
  address   = {Santa Fe, New Mexico, USA},
  publisher = {Association for Computational Linguistics},
  pages     = {2023--2032},
  abstract  = {Distributed word representation plays a pivotal role in various natural language processing tasks. In spite of its success, most existing methods only consider contextual information, which is suboptimal when used in various tasks due to a lack of task-specific features. The rational word embeddings should have the ability to capture both the semantic features and task-specific features of words. In this paper, we propose a task-oriented word embedding method and apply it to the text classification task. With the function-aware component, our method regularizes the distribution of words to enable the embedding space to have a clear classification boundary. We evaluate our method using five text classification datasets. The experiment results show that our method significantly outperforms the state-of-the-art methods.},
  url       = {http://www.aclweb.org/anthology/C18-1172}
}

