@inproceedings{das-EtAl:2016:WSSANLP2016,
  author    = {Das, Ayan and Yerra, Pranay and Kumar, Ken and Sarkar, Sudeshna},
  title     = {A study of attention-based neural machine translation model on {Indian} languages},
  booktitle = {Proceedings of the 6th Workshop on South and Southeast Asian Natural Language Processing ({WSSANLP2016})},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The {COLING} 2016 Organizing Committee},
  pages     = {163--172},
  abstract  = {Neural machine translation (NMT) models have recently been shown to be very
               successful in machine translation (MT). The use of LSTMs in machine translation
               has significantly improved the translation performance for longer sentences by
               being able to capture the context and long range correlations of the sentences
               in their hidden layers. The attention model based NMT system (Bahdanau et al.,
               2014) has become the state-of-the-art, performing equal or better than other
               statistical MT approaches. In this paper, we wish to study the performance of
               the attention-model based NMT system (Bahdanau et al., 2014) on the Indian
               language pair, Hindi and Bengali, and do an analysis on the types or errors
               that occur in case when the languages are morphologically rich and there is a
               scarcity of large parallel training corpus. We then carry out certain
               post-processing heuristic steps to improve the quality of the translated
               statements and suggest further measures that can be carried out.},
  url       = {http://aclweb.org/anthology/W16-3717},
}

