@inproceedings{liu-EtAl:2017:EMNLP20172,
  author    = {Liu, Rui and Hu, Junjie and Wei, Wei and Yang, Zi and Nyberg, Eric},
  title     = {Structural Embedding of Syntactic Trees for Machine Comprehension},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  pages     = {815--824},
  doi       = {10.18653/v1/D17-1085},
  url       = {https://www.aclweb.org/anthology/D17-1085},
  abstract  = {Deep neural networks for machine comprehension typically utilizes only word or
               character embeddings without explicitly taking advantage of structured
               linguistic information such as constituency trees and dependency trees. In this
               paper, we propose structural embedding of syntactic trees (SEST), an algorithm
               framework to utilize structured information and encode them into vector
               representations that can boost the performance of algorithms for the machine
               comprehension. We evaluate our approach using a state-of-the-art neural
               attention model on the SQuAD dataset. Experimental results demonstrate that our
               model can accurately identify the syntactic boundaries of the sentences and
               extract answers that are syntactically coherent over the baseline methods.},
}

