@inproceedings{kim-stratos-sarikaya:2016:COLING1,
  author    = {Kim, Young-Bum and Stratos, Karl and Sarikaya, Ruhi},
  title     = {Frustratingly Easy Neural Domain Adaptation},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  pages     = {387--396},
  abstract  = {Popular techniques for domain adaptation such as the feature augmentation method of Daum{\'e} III (2009) have mostly been considered for sparse binary-valued features, but not for dense real-valued features such as those used in neural networks. In this paper, we describe simple neural extensions of these techniques. First, we propose a natural generalization of the feature augmentation method that uses K + 1 LSTMs where one model captures global patterns across all K domains and the remaining K models capture domain-specific information. Second, we propose a novel application of the framework for learning shared structures by Ando and Zhang (2005) to domain adaptation, and also provide a neural extension of their approach. In experiments on slot tagging over 17 domains, our methods give clear performance improvement over Daum{\'e} III (2009) applied on feature-rich CRFs.},
  url       = {http://aclweb.org/anthology/C16-1038},
}

