@InProceedings{kim-EtAl:2017:EMNLP2017,
  author    = {Kim, Joo-Kyung  and  Kim, Young-Bum  and  Sarikaya, Ruhi  and  Fosler-Lussier, Eric},
  title     = {Cross-Lingual Transfer Learning for {POS} Tagging without Cross-Lingual Resources},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {2832--2838},
  abstract  = {Training a POS tagging model with crosslingual transfer learning usually
    requires linguistic knowledge and resources about the relation between the
    source language and the target language. In this paper, we introduce a
    cross-lingual transfer learning model for POS tagging without ancillary
    resources such as parallel corpora. The proposed cross-lingual model utilizes a
    common BLSTM that enables knowledge transfer from other languages, and private
    BLSTMs for language-specific representations. The cross-lingual model is
    trained with language-adversarial training and bidirectional language modeling
    as auxiliary objectives to better represent language-general information while
    not losing the information about a specific target language. Evaluating on POS
    datasets from 14 languages in the Universal Dependencies corpus, we show that
    the proposed transfer learning model improves
    the POS tagging performance of the target languages without exploiting any
    linguistic knowledge between the source language and the target language.},
  doi       = {10.18653/v1/D17-1302},
  url       = {https://www.aclweb.org/anthology/D17-1302}
}

