@inproceedings{bohnet-EtAl:2018:Long,
  author    = {Bohnet, Bernd  and  McDonald, Ryan  and  Sim{\~o}es, Gon{\c{c}}alo  and  Andor, Daniel  and  Pitler, Emily  and  Maynez, Joshua},
  title     = {Morphosyntactic Tagging with a {Meta-BiLSTM} Model over Context Sensitive Token Encodings},
  booktitle = {Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2018},
  address   = {Melbourne, Australia},
  publisher = {Association for Computational Linguistics},
  pages     = {2642--2652},
  abstract  = {The rise of neural networks, and particularly recurrent neural networks, has produced significant advances in part-of-speech tagging accuracy. One characteristic common among these models is the presence of rich initial word encodings. These encodings typically are composed of a recurrent character-based representation with dynamically and pre-trained word embeddings. However, these encodings do not consider a context wider than a single word and it is only through subsequent recurrent layers that word or sub-word information interacts. In this paper, we investigate models that use recurrent neural networks with sentence-level context for initial character and word-based representations. In particular we show that optimal results are obtained by integrating these context sensitive representations through synchronized training with a meta-model that learns to combine their states.},
  doi       = {10.18653/v1/P18-1246},
  url       = {http://www.aclweb.org/anthology/P18-1246}
}

