@InProceedings{fonarev-EtAl:2017:Long,
  author    = {Fonarev, Alexander  and  Grinchuk, Oleksii  and  Gusev, Gleb  and  Serdyukov, Pavel  and  Oseledets, Ivan},
  title     = {Riemannian Optimization for Skip-Gram Negative Sampling},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = {July},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {2028--2036},
  abstract  = {The Skip-Gram Negative Sampling (SGNS) word embedding model, well known through its
	implementation in the ``word2vec'' software, is usually optimized by stochastic
	gradient descent. However, optimizing the SGNS objective can be viewed as
	the problem of searching for a good matrix under a low-rank constraint. A
	standard way to solve this type of problem is to apply the Riemannian optimization
	framework, optimizing the SGNS objective over the manifold of matrices of the
	required low rank. In this paper, we propose an algorithm that optimizes the SGNS
	objective using Riemannian optimization and demonstrate its superiority over popular
	competitors, such as the original method for training SGNS and SVD of the SPPMI
	matrix.},
  url       = {http://aclweb.org/anthology/P17-1185}
}

