@inproceedings{kunchukuttan-EtAl:2017:I17-2,
  author    = {Kunchukuttan, Anoop  and  Shah, Maulik  and  Prakash, Pradyot  and  Bhattacharyya, Pushpak},
  title     = {Utilizing Lexical Similarity between Related, Low-resource Languages for Pivot-based {SMT}},
  booktitle = {Proceedings of the Eighth International Joint Conference on Natural Language Processing (Volume 2: Short Papers)},
  month     = nov,
  year      = {2017},
  address   = {Taipei, Taiwan},
  publisher = {Asian Federation of Natural Language Processing},
  pages     = {283--289},
  abstract  = {We investigate pivot-based translation between related languages in a low
               resource, phrase-based SMT setting. We show that a subword-level pivot-based
               SMT model using a related pivot language is substantially better than word and
               morpheme-level pivot models. It is also highly competitive with the best direct
               translation model, which is encouraging as no direct source-target training
               corpus is used.
               We also show that combining multiple related language pivot models can rival a
               direct translation model.
               Thus, the use of subwords as translation units coupled with multiple related
               pivot languages can compensate for the lack of a direct parallel corpus.},
  url       = {http://www.aclweb.org/anthology/I17-2048},
}

