@InProceedings{jiang-EtAl:2016:COLING3,
  author    = {Jiang, Di and Shi, Lei and Lian, Rongzhong and Wu, Hua},
  title     = {Latent Topic Embedding},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {2689--2698},
  abstract  = {Topic modeling and word embedding are two important techniques for deriving
	latent semantics from data. General-purpose topic models typically work at
	coarse granularity by capturing word co-occurrence at the document/sentence
	level. In contrast, word embedding models usually work at much finer
	granularity by modeling word co-occurrence within small sliding windows. With
	the aim of deriving latent semantics by considering word co-occurrence at
	different levels of granularity, we propose a novel model named \textit{Latent
	Topic Embedding} (LTE), which seamlessly integrates topic generation and
	embedding learning in one unified framework. We further propose an efficient
	Monte Carlo EM algorithm to estimate the parameters of interest. By retaining
	the individual advantages of topic modeling and word embedding, LTE yields
	better latent topics and word embeddings. Extensive experiments verify the
	superiority of LTE over state-of-the-art methods.},
  url       = {http://aclweb.org/anthology/C16-1253}
}

