@InProceedings{botha-EtAl:2017:EMNLP2017,
  author    = {Botha, Jan A.  and  Pitler, Emily  and  Ma, Ji  and  Bakalov, Anton  and  Salcianu, Alex  and  Weiss, David  and  McDonald, Ryan  and  Petrov, Slav},
  title     = {Natural Language Processing with Small Feed-Forward Networks},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {2879--2885},
  abstract  = {We show that small and shallow feed-forward neural networks can achieve near
	state-of-the-art results on a range of unstructured and structured language
	processing tasks while being considerably cheaper in memory and computational
	requirements than deep recurrent models.
	Motivated by resource-constrained environments like mobile phones, we showcase
	simple techniques for obtaining such small neural network models, and
	investigate different tradeoffs when deciding how to allocate a small memory
	budget.},
  doi       = {10.18653/v1/D17-1309},
  url       = {https://www.aclweb.org/anthology/D17-1309}
}

