@InProceedings{ziser-reichart:2017:CoNLL,
  author    = {Ziser, Yftah and Reichart, Roi},
  title     = {Neural Structural Correspondence Learning for Domain Adaptation},
  booktitle = {Proceedings of the 21st Conference on Computational Natural Language Learning (CoNLL 2017)},
  month     = {August},
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  pages     = {400--410},
  abstract  = {We introduce a neural network model that marries together ideas from two
               prominent strands of research on domain adaptation through representation
               learning: structural correspondence learning (SCL, (Blitzer et al., 2006)) and
               autoencoder neural networks (NNs). Our model is a three-layer NN that learns to
               encode the non-pivot features of an input example into a low dimensional
               representation, so that the existence of pivot features (features that are
               prominent in both domains and convey useful information for the NLP task) in
               the example can be decoded from that representation. The low-dimensional
               representation is then employed in a learning algorithm for the task. Moreover,
               we show how to inject pre-trained word embeddings into our model in order to
               improve generalization across examples with similar pivot features. We
               experiment with the task of cross-domain sentiment classification on 16 domain
               pairs and show substantial improvements over strong baselines.},
  url       = {http://aclweb.org/anthology/K17-1040}
}
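
A minimal sketch of the encode-nonpivot / decode-pivot idea summarized in the abstract
(a hidden layer maps non-pivot features to a low-dimensional code from which pivot-feature
occurrence is predicted). Written in PyTorch; layer sizes, variable names, and the choice
of a sigmoid hidden layer and BCE loss are illustrative assumptions, not the authors'
exact implementation:

import torch
import torch.nn as nn

class PivotPredictor(nn.Module):
    # Encode the non-pivot feature vector into a low-dimensional representation,
    # then decode logits for the presence of each pivot feature.
    def __init__(self, n_nonpivot, n_pivot, dim=100):
        super().__init__()
        self.encoder = nn.Linear(n_nonpivot, dim)   # low-dimensional representation
        self.decoder = nn.Linear(dim, n_pivot)      # pivot-occurrence logits

    def forward(self, x_nonpivot):
        h = torch.sigmoid(self.encoder(x_nonpivot))
        return h, self.decoder(h)

# Toy usage: binary bag-of-words non-pivot inputs, multi-label pivot targets.
model = PivotPredictor(n_nonpivot=5000, n_pivot=200, dim=100)
x = torch.rand(32, 5000).round()                    # fake non-pivot indicators
y = torch.rand(32, 200).round()                     # fake pivot indicators
h, logits = model(x)
loss = nn.BCEWithLogitsLoss()(logits, y)            # predict pivot presence
loss.backward()
# The learned code `h` would then be used as (part of) the representation
# fed to the downstream task classifier.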

