@inproceedings{rei-crichton-pyysalo:2016:COLING,
  author    = {Rei, Marek and Crichton, Gamal and Pyysalo, Sampo},
  title     = {Attending to Characters in Neural Sequence Labeling Models},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The {COLING} 2016 Organizing Committee},
  pages     = {309--318},
  abstract  = {Sequence labeling architectures use word embeddings for capturing similarity,
	but suffer when handling previously unseen or rare words.
	We investigate character-level extensions to such models and propose a novel
	architecture for combining alternative word representations.
	By using an attention mechanism, the model is able to dynamically decide how
	much information to use from a word- or character-level component.
	We evaluated different architectures on a range of sequence labeling datasets,
	and character-level extensions were found to improve performance on every
	benchmark.
	In addition, the proposed attention-based architecture delivered the best
	results even with a smaller number of trainable parameters.},
  url       = {http://aclweb.org/anthology/C16-1030},
}

