@InProceedings{zhang-EtAl:2016:COLING2,
  author    = {Zhang, Jian  and  Wu, Xiaofeng  and  Way, Andy  and  Liu, Qun},
  title     = {Fast Gated Neural Domain Adaptation: Language Model as a Case Study},
  booktitle = {Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = {December},
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {1386--1397},
  abstract  = {Neural network training has been shown to be advantageous in many natural
	language processing applications, such as language modelling or machine
	translation. In this paper, we describe in detail a novel domain adaptation
	mechanism in neural network training. Instead of learning and adapting the
	neural network on millions of training sentences -- which can be very
	time-consuming or even infeasible in some cases -- we design a domain
	adaptation gating mechanism which can be used in recurrent neural networks to
	quickly learn the out-of-domain knowledge directly from the word vector
	representations with little speed overhead. In our experiments, we use the
	recurrent neural network language model (LM) as a case study. We show that the
	neural LM perplexity can be reduced by 7.395 and 12.011 using the proposed
	domain adaptation mechanism on the Penn Treebank and News data, respectively.
	Furthermore, we show that using the domain-adapted neural LM to re-rank the
	statistical machine translation n-best list on the French-to-English language
	pair can significantly improve translation quality.},
  url       = {http://aclweb.org/anthology/C16-1131}
}
