@inproceedings{shioda-EtAl:2017:NLPTEA,
  author    = {Shioda, Kent  and  Komachi, Mamoru  and  Ikeya, Rue  and  Mochihashi, Daichi},
  title     = {Suggesting Sentences for {ESL} using Kernel Embeddings},
  booktitle = {Proceedings of the 4th Workshop on Natural Language Processing Techniques for Educational Applications ({NLPTEA} 2017)},
  month     = dec,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {64--68},
  abstract  = {Sentence retrieval is an important NLP application for English as a Second
    Language (ESL) learners.
    ESL learners are familiar with web search engines, but generic web search
    results may not be adequate for composing documents in a specific domain.
    However, if we build our own search system specialized to a domain, it may be
    subject to the data sparseness problem.
    Recently proposed word2vec partially addresses the data sparseness problem, but
    fails to extract sentences relevant to queries owing to the modeling of the
    latent intent of the query.
    Thus, we propose a method of retrieving example sentences using kernel
    embeddings and N-gram windows.
    This method implicitly models latent intent of query and sentences, and
    alleviates the problem of noisy alignment.
    Our results show that our method achieved higher precision in sentence
    retrieval for ESL in the domain of a university press release corpus, as
    compared to a previous unsupervised method used for a semantic textual
    similarity task.},
  url       = {http://www.aclweb.org/anthology/W17-5911},
}

