% Author names fixed to "Last, First" form (surnames are Yang, Cai, Chen,
% Leung, Lau — the original export had given/family swapped and "LAU" in
% all caps). Month uses the standard macro; acronyms/proper nouns braced
% so sentence-casing styles keep their capitalisation. Key unchanged.
@inproceedings{kai-EtAl:2016:COLING,
  author    = {Yang, Kai and Cai, Yi and Chen, Zhenhong and Leung, Ho-fung and Lau, Raymond},
  title     = {Exploring Topic Discriminating Power of Words in Latent {Dirichlet} Allocation},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {2238--2247},
  abstract  = {Latent Dirichlet Allocation (LDA) and its variants have been widely used to
	discover latent topics in textual documents. However, some of topics
	generated by LDA may be noisy with irrelevant words scattering across these
	topics. We name this kind of words as topic-indiscriminate words, which tend
	to make topics more ambiguous and less interpretable by humans. In our work,
	we propose a new topic model named TWLDA, which assigns low weights to words
	with low topic discriminating power (ability). Our experimental results show
	that the proposed approach, which effectively reduces the number of
	topic-indiscriminate words in discovered topics, improves the effectiveness
	of LDA.},
  url       = {http://aclweb.org/anthology/C16-1211},
}

