@inproceedings{rychly-teslia-2023-muni,
title = "{MUNI}-{NLP} Submission for {C}zech-{U}krainian Translation Task at {WMT}23",
author = "Rychly, Pavel and
Teslia, Yuliia",
editor = "Koehn, Philipp and
Haddow, Barry and
Kocmi, Tom and
Monz, Christof",
booktitle = "Proceedings of the Eighth Conference on Machine Translation",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2023.wmt-1.14",
doi = "10.18653/v1/2023.wmt-1.14",
pages = "162--165",
abstract = "The system is trained on officially provided data only. We have heavily filtered all the data to remove machine translated text, Russian text and other noise. We use the DeepNorm modification of the transformer architecture in the TorchScale library with 18 encoder layers and 6 decoder layers. The initial system for backtranslation uses HFT tokenizer, the final system uses custom tokenizer derived from HFT.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="rychly-teslia-2023-muni">
<titleInfo>
<title>MUNI-NLP Submission for Czech-Ukrainian Translation Task at WMT23</title>
</titleInfo>
<name type="personal">
<namePart type="given">Pavel</namePart>
<namePart type="family">Rychly</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yuliia</namePart>
<namePart type="family">Teslia</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Eighth Conference on Machine Translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Philipp</namePart>
<namePart type="family">Koehn</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Barry</namePart>
<namePart type="family">Haddow</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tom</namePart>
<namePart type="family">Kocmi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Christof</namePart>
<namePart type="family">Monz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Singapore</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>The system is trained on officially provided data only. We have heavily filtered all the data to remove machine translated text, Russian text and other noise. We use the DeepNorm modification of the transformer architecture in the TorchScale library with 18 encoder layers and 6 decoder layers. The initial system for backtranslation uses HFT tokenizer, the final system uses custom tokenizer derived from HFT.</abstract>
<identifier type="citekey">rychly-teslia-2023-muni</identifier>
<identifier type="doi">10.18653/v1/2023.wmt-1.14</identifier>
<location>
<url>https://aclanthology.org/2023.wmt-1.14</url>
</location>
<part>
<date>2023-12</date>
<extent unit="page">
<start>162</start>
<end>165</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T MUNI-NLP Submission for Czech-Ukrainian Translation Task at WMT23
%A Rychly, Pavel
%A Teslia, Yuliia
%Y Koehn, Philipp
%Y Haddow, Barry
%Y Kocmi, Tom
%Y Monz, Christof
%S Proceedings of the Eighth Conference on Machine Translation
%D 2023
%8 December
%I Association for Computational Linguistics
%C Singapore
%F rychly-teslia-2023-muni
%X The system is trained on officially provided data only. We have heavily filtered all the data to remove machine translated text, Russian text and other noise. We use the DeepNorm modification of the transformer architecture in the TorchScale library with 18 encoder layers and 6 decoder layers. The initial system for backtranslation uses HFT tokenizer, the final system uses custom tokenizer derived from HFT.
%R 10.18653/v1/2023.wmt-1.14
%U https://aclanthology.org/2023.wmt-1.14
%U https://doi.org/10.18653/v1/2023.wmt-1.14
%P 162-165
Markdown (Informal)
[MUNI-NLP Submission for Czech-Ukrainian Translation Task at WMT23](https://aclanthology.org/2023.wmt-1.14) (Rychly & Teslia, WMT 2023)
ACL