@InProceedings{strubell-mccallum:2017:StructPred,
  author    = {Strubell, Emma  and  McCallum, Andrew},
  title     = {Dependency Parsing with Dilated Iterated Graph {CNNs}},
  booktitle = {Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {1--6},
  abstract  = {Dependency parses are an effective way to inject linguistic knowledge into many
	downstream tasks, and many practitioners wish to efficiently parse sentences at
	scale. Recent advances in GPU hardware have enabled neural networks to achieve
	significant gains over the previous best models, these models still fail to
	leverage GPUs' capability for massive parallelism due to their requirement of
	sequential processing of the sentence. In response, we propose Dilated Iterated
	Graph Convolutional Neural Networks (DIG-CNNs) for
	graph-based dependency parsing, a graph convolutional architecture that allows
	for efficient end-to-end GPU parsing. In experiments on the English Penn
	TreeBank benchmark, we show that DIG-CNNs perform on par with some of the best
	neural network parsers.},
  url       = {http://www.aclweb.org/anthology/W17-4301},
}

