@inproceedings{kordjamshidi-EtAl:2016:COLING,
  author    = {Kordjamshidi, Parisa and Khashabi, Daniel and Christodoulopoulos, Christos and Mangipudi, Bhargav and Singh, Sameer and Roth, Dan},
  title     = {Better call {Saul}: Flexible Programming for Learning and Inference in {NLP}},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The {COLING} 2016 Organizing Committee},
  pages     = {3030--3040},
  abstract  = {We present a novel way for designing complex joint inference and learning
    models using Saul~\cite{KordjamshidiRoWu15}, a recently-introduced declarative
    learning-based programming language (DeLBP). We enrich Saul with components
    that are necessary for a broad range of learning based Natural Language
    Processing tasks at various levels of granularity. We illustrate these advances
    using three different, well-known NLP problems, and show how these generic
    learning and inference modules can directly exploit Saul's graph-based data
    representation. These properties allow the programmer to easily switch between
    different model formulations and configurations, and consider various kinds of
    dependencies and correlations among variables of interest with minimal
    programming effort. We argue that Saul provides an extremely useful paradigm
    both for the design of advanced NLP systems and for supporting advanced
    research in NLP.},
  internal-note = {NOTE(review): abstract contains \cite{KordjamshidiRoWu15}; if a style prints abstracts, that key must exist in the database or the citation will be undefined},
  url       = {http://aclweb.org/anthology/C16-1285},
}

