@inproceedings{das-EtAl:2017:EACLlong1,
  author    = {Das, Rajarshi and Neelakantan, Arvind and Belanger, David and McCallum, Andrew},
  title     = {Chains of Reasoning over Entities, Relations, and Text using {Recurrent Neural Networks}},
  booktitle = {Proceedings of the 15th Conference of the European Chapter of the Association for Computational Linguistics: Volume 1, Long Papers},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  pages     = {132--141},
  abstract  = {Our goal is to combine the rich multi-step inference of symbolic logical
    reasoning with the generalization capabilities of neural networks.  We are
    particularly interested in complex reasoning about entities and relations in
    text and large-scale knowledge bases (KBs). \newcite{neelakantan15} use RNNs to
    compose the distributed semantics of multi-hop paths in KBs; however for
    multiple reasons, the approach lacks accuracy and practicality. This paper
    proposes three significant modeling advances: (1) we learn to jointly reason
    about relations, \emph{entities, and entity-types}; (2) we use neural attention
    modeling to incorporate \emph{multiple paths}; (3) we learn to \emph{share
    strength in a single RNN} that represents logical composition across all
    relations. On a large-scale Freebase+ClueWeb prediction task, we achieve 25\%
    error reduction, and a 53\% error reduction on sparse relations due to shared
    strength. On chains of reasoning in WordNet we reduce error in mean quantile by
    84\% versus previous state-of-the-art.},
  url       = {http://www.aclweb.org/anthology/E17-1013},
}

