@InProceedings{nguyenle-EtAl:2017:I17-1,
  author    = {Nguyen Le, An  and  Martinez, Ander  and  Yoshimoto, Akifumi  and  Matsumoto, Yuji},
  title     = {Improving Sequence to Sequence Neural Machine Translation by Utilizing Syntactic Dependency Information},
  booktitle = {Proceedings of the Eighth International Joint Conference on Natural Language Processing (Volume 1: Long Papers)},
  month     = {November},
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {21--29},
  abstract  = {Sequence to Sequence Neural Machine Translation has achieved significant
	performance in recent years. Yet, there are some existing issues that Neural
	Machine Translation still does not solve completely. Two of them are
	translation for long sentences and \enquote{over-translation}. To address
	these two problems, we propose an approach that utilizes more grammatical
	information, such as syntactic dependencies, so that the output can be
	generated based on richer information. In our approach, syntactic dependencies
	are employed in decoding. In addition, the output of the model is presented
	not as a simple sequence of tokens but as a linearized tree construction. To
	assess the performance, we construct a model based on an attention mechanism
	encoder-decoder model in which the source language is input to the encoder as
	a sequence and the decoder generates the target language as a linearized
	dependency tree structure. Experiments on the Europarl-v7 dataset of
	French-to-English translation demonstrate that our proposed method improves
	BLEU scores by 1.57 and 2.40 on datasets consisting of sentences with up to 50
	and 80 tokens, respectively. Furthermore, the proposed method also mitigates
	the two existing problems, ineffective translation for long sentences and
	over-translation, in Neural Machine Translation.},
  url       = {http://www.aclweb.org/anthology/I17-1003}
}

