BibTeX
@inproceedings{huerta-enochian-etal-2022-kosign,
    title = "{K}o{S}ign Sign Language Translation Project: Introducing The {NIASL}2021 Dataset",
    author = "Huerta-Enochian, Mathew and
      Lee, Du Hui and
      Myung, Hye Jin and
      Byun, Kang Suk and
      Lee, Jun Woo",
    editor = "Efthimiou, Eleni and
      Fotinea, Stavroula-Evita and
      Hanke, Thomas and
      McDonald, John C. and
      Shterionov, Dimitar and
      Wolfe, Rosalee",
    booktitle = "Proceedings of the 7th International Workshop on Sign Language Translation and Avatar Technology: The Junction of the Visual and the Textual: Challenges and Perspectives",
    month = jun,
    year = "2022",
    address = "Marseille, France",
    publisher = "European Language Resources Association",
    url = "https://aclanthology.org/2022.sltat-1.9",
    pages = "59--66",
    abstract = "We introduce a new sign language production (SLP) and sign language translation (SLT) dataset, NIASL2021, consisting of 201,026 Korean-KSL data pairs. KSL translations of Korean source texts are represented in three formats: video recordings, keypoint position data, and time-aligned gloss annotations for each hand (using a 7,989-sign vocabulary) and for eight different non-manual signals (NMS). We evaluated our sign language elicitation methodology and found that text-based prompting had a negative effect on translation quality in terms of naturalness and comprehension. We recommend distilling text into a visual medium before translating into sign language or adding a prompt-blind review step to text-based translation methodologies.",
}
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="huerta-enochian-etal-2022-kosign">
    <titleInfo>
      <title>KoSign Sign Language Translation Project: Introducing The NIASL2021 Dataset</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Mathew</namePart>
      <namePart type="family">Huerta-Enochian</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Du</namePart>
      <namePart type="given">Hui</namePart>
      <namePart type="family">Lee</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Hye</namePart>
      <namePart type="given">Jin</namePart>
      <namePart type="family">Myung</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Kang</namePart>
      <namePart type="given">Suk</namePart>
      <namePart type="family">Byun</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Jun</namePart>
      <namePart type="given">Woo</namePart>
      <namePart type="family">Lee</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2022-06</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 7th International Workshop on Sign Language Translation and Avatar Technology: The Junction of the Visual and the Textual: Challenges and Perspectives</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Eleni</namePart>
        <namePart type="family">Efthimiou</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Stavroula-Evita</namePart>
        <namePart type="family">Fotinea</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Thomas</namePart>
        <namePart type="family">Hanke</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">John</namePart>
        <namePart type="given">C</namePart>
        <namePart type="family">McDonald</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Dimitar</namePart>
        <namePart type="family">Shterionov</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Rosalee</namePart>
        <namePart type="family">Wolfe</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>European Language Resources Association</publisher>
        <place>
          <placeTerm type="text">Marseille, France</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>We introduce a new sign language production (SLP) and sign language translation (SLT) dataset, NIASL2021, consisting of 201,026 Korean-KSL data pairs. KSL translations of Korean source texts are represented in three formats: video recordings, keypoint position data, and time-aligned gloss annotations for each hand (using a 7,989-sign vocabulary) and for eight different non-manual signals (NMS). We evaluated our sign language elicitation methodology and found that text-based prompting had a negative effect on translation quality in terms of naturalness and comprehension. We recommend distilling text into a visual medium before translating into sign language or adding a prompt-blind review step to text-based translation methodologies.</abstract>
    <identifier type="citekey">huerta-enochian-etal-2022-kosign</identifier>
    <location>
      <url>https://aclanthology.org/2022.sltat-1.9</url>
    </location>
    <part>
      <date>2022-06</date>
      <extent unit="page">
        <start>59</start>
        <end>66</end>
      </extent>
    </part>
  </mods>
</modsCollection>
Endnote
%0 Conference Proceedings
%T KoSign Sign Language Translation Project: Introducing The NIASL2021 Dataset
%A Huerta-Enochian, Mathew
%A Lee, Du Hui
%A Myung, Hye Jin
%A Byun, Kang Suk
%A Lee, Jun Woo
%Y Efthimiou, Eleni
%Y Fotinea, Stavroula-Evita
%Y Hanke, Thomas
%Y McDonald, John C.
%Y Shterionov, Dimitar
%Y Wolfe, Rosalee
%S Proceedings of the 7th International Workshop on Sign Language Translation and Avatar Technology: The Junction of the Visual and the Textual: Challenges and Perspectives
%D 2022
%8 June
%I European Language Resources Association
%C Marseille, France
%F huerta-enochian-etal-2022-kosign
%X We introduce a new sign language production (SLP) and sign language translation (SLT) dataset, NIASL2021, consisting of 201,026 Korean-KSL data pairs. KSL translations of Korean source texts are represented in three formats: video recordings, keypoint position data, and time-aligned gloss annotations for each hand (using a 7,989-sign vocabulary) and for eight different non-manual signals (NMS). We evaluated our sign language elicitation methodology and found that text-based prompting had a negative effect on translation quality in terms of naturalness and comprehension. We recommend distilling text into a visual medium before translating into sign language or adding a prompt-blind review step to text-based translation methodologies.
%U https://aclanthology.org/2022.sltat-1.9
%P 59-66
Markdown (Informal)
[KoSign Sign Language Translation Project: Introducing The NIASL2021 Dataset](https://aclanthology.org/2022.sltat-1.9) (Huerta-Enochian et al., SLTAT 2022)
ACL
Mathew Huerta-Enochian, Du Hui Lee, Hye Jin Myung, Kang Suk Byun, and Jun Woo Lee. 2022. KoSign Sign Language Translation Project: Introducing The NIASL2021 Dataset. In Proceedings of the 7th International Workshop on Sign Language Translation and Avatar Technology: The Junction of the Visual and the Textual: Challenges and Perspectives, pages 59–66, Marseille, France. European Language Resources Association.
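
For a concrete picture of what one NIASL2021 record contains, the minimal Python sketch below mirrors the structure described in the abstract: a Korean source text paired with a KSL translation given as a video recording, per-frame keypoint positions, time-aligned gloss annotations for each hand (drawn from a roughly 7,989-sign vocabulary), and time-aligned annotations for eight non-manual signal (NMS) channels. All class names, field names, and types here are illustrative assumptions; they are not the dataset's published schema or loading API.

# Illustrative sketch of one NIASL2021 data pair, based only on the abstract
# above. Field names, types, and NMS channel keys are assumptions, not the
# dataset's actual format.
from dataclasses import dataclass, field
from typing import Dict, List, Tuple


@dataclass
class GlossSpan:
    """A single time-aligned gloss annotation."""
    gloss: str    # sign label from the ~7,989-sign vocabulary
    start: float  # start time in seconds
    end: float    # end time in seconds


@dataclass
class KoreanKSLPair:
    """One Korean-KSL pair: source text plus its KSL translation in three formats."""
    korean_text: str                            # Korean source text
    video_path: str                             # KSL video recording
    keypoints: List[List[Tuple[float, float]]]  # per-frame (x, y) keypoint positions
    right_hand_glosses: List[GlossSpan] = field(default_factory=list)
    left_hand_glosses: List[GlossSpan] = field(default_factory=list)
    # eight NMS channels, each with its own list of time-aligned spans
    nms: Dict[str, List[GlossSpan]] = field(default_factory=dict)


# Minimal usage example with dummy values.
pair = KoreanKSLPair(
    korean_text="example Korean source sentence",
    video_path="videos/000001.mp4",
    keypoints=[[(0.0, 0.0)]],
    right_hand_glosses=[GlossSpan(gloss="EXAMPLE", start=0.0, end=0.8)],
)
print(pair.right_hand_glosses[0].gloss)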