@inproceedings{xia-EtAl:2017:EACLshort,
  author    = {Xia, Yandi and Levine, Aaron and Das, Pradipto and Di Fabbrizio, Giuseppe and Shinzato, Keiji and Datta, Ankur},
  title     = {Large-Scale Categorization of {Japanese} Product Titles Using Neural Attention Models},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 2, Short Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {663--668},
  abstract  = {We propose a variant of Convolutional Neural Network (CNN) models, the
	Attention CNN (ACNN); for large-scale categorization of millions of Japanese
	items into thirty-five product categories.
	Compared to a state-of-the-art Gradient Boosted Tree (GBT) classifier, the
	proposed model reduces training time from three weeks to three days while
	maintaining more than 96\% accuracy.
	Additionally, our proposed model characterizes products by imputing attentive
	focus on word tokens in a language agnostic way.
	The attention words have been observed to be semantically highly correlated
	with the predicted categories and give us a choice of automatic feature
	extraction for downstream processing.},
  url       = {http://www.aclweb.org/anthology/E17-2105},
}

