@inproceedings{stamp-etal-2022-capturing,
title = "Capturing Distalization",
author = "Stamp, Rose and
Khatib, Lilyana and
Hel-Or, Hagit",
editor = "Efthimiou, Eleni and
Fotinea, Stavroula-Evita and
Hanke, Thomas and
Hochgesang, Julie A. and
Kristoffersen, Jette and
Mesch, Johanna and
Schulder, Marc",
booktitle = "Proceedings of the LREC2022 10th Workshop on the Representation and Processing of Sign Languages: Multilingual Sign Language Resources",
month = jun,
year = "2022",
address = "Marseille, France",
publisher = "European Language Resources Association",
url = "https://aclanthology.org/2022.signlang-1.29",
pages = "187--191",
abstract = "Coding and analyzing large amounts of video data is a challenge for sign language researchers, who traditionally code 2D video data manually. In recent years, the implementation of 3D motion capture technology as a means of automatically tracking movement in sign language data has been an important step forward. Several studies show that motion capture technologies can measure sign language movement parameters {--} such as volume, speed, variance {--} with high accuracy and objectivity. In this paper, using motion capture technology and machine learning, we attempt to automatically measure a more complex feature in sign language known as distalization. In general, distalized signs use the joints further from the torso (such as the wrist), however, the measure is relative and therefore distalization is not straightforward to measure. The development of a reliable and automatic measure of distalization using motion tracking technology is of special interest in many fields of sign language research.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="stamp-etal-2022-capturing">
<titleInfo>
<title>Capturing Distalization</title>
</titleInfo>
<name type="personal">
<namePart type="given">Rose</namePart>
<namePart type="family">Stamp</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lilyana</namePart>
<namePart type="family">Khatib</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hagit</namePart>
<namePart type="family">Hel-Or</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the LREC2022 10th Workshop on the Representation and Processing of Sign Languages: Multilingual Sign Language Resources</title>
</titleInfo>
<name type="personal">
<namePart type="given">Eleni</namePart>
<namePart type="family">Efthimiou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Stavroula-Evita</namePart>
<namePart type="family">Fotinea</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Thomas</namePart>
<namePart type="family">Hanke</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Julie</namePart>
<namePart type="given">A.</namePart>
<namePart type="family">Hochgesang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jette</namePart>
<namePart type="family">Kristoffersen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Johanna</namePart>
<namePart type="family">Mesch</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marc</namePart>
<namePart type="family">Schulder</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>European Language Resources Association</publisher>
<place>
<placeTerm type="text">Marseille, France</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Coding and analyzing large amounts of video data is a challenge for sign language researchers, who traditionally code 2D video data manually. In recent years, the implementation of 3D motion capture technology as a means of automatically tracking movement in sign language data has been an important step forward. Several studies show that motion capture technologies can measure sign language movement parameters – such as volume, speed, variance – with high accuracy and objectivity. In this paper, using motion capture technology and machine learning, we attempt to automatically measure a more complex feature in sign language known as distalization. In general, distalized signs use the joints further from the torso (such as the wrist), however, the measure is relative and therefore distalization is not straightforward to measure. The development of a reliable and automatic measure of distalization using motion tracking technology is of special interest in many fields of sign language research.</abstract>
<identifier type="citekey">stamp-etal-2022-capturing</identifier>
<location>
<url>https://aclanthology.org/2022.signlang-1.29</url>
</location>
<part>
<date>2022-06</date>
<extent unit="page">
<start>187</start>
<end>191</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Capturing Distalization
%A Stamp, Rose
%A Khatib, Lilyana
%A Hel-Or, Hagit
%Y Efthimiou, Eleni
%Y Fotinea, Stavroula-Evita
%Y Hanke, Thomas
%Y Hochgesang, Julie A.
%Y Kristoffersen, Jette
%Y Mesch, Johanna
%Y Schulder, Marc
%S Proceedings of the LREC2022 10th Workshop on the Representation and Processing of Sign Languages: Multilingual Sign Language Resources
%D 2022
%8 June
%I European Language Resources Association
%C Marseille, France
%F stamp-etal-2022-capturing
%X Coding and analyzing large amounts of video data is a challenge for sign language researchers, who traditionally code 2D video data manually. In recent years, the implementation of 3D motion capture technology as a means of automatically tracking movement in sign language data has been an important step forward. Several studies show that motion capture technologies can measure sign language movement parameters – such as volume, speed, variance – with high accuracy and objectivity. In this paper, using motion capture technology and machine learning, we attempt to automatically measure a more complex feature in sign language known as distalization. In general, distalized signs use the joints further from the torso (such as the wrist), however, the measure is relative and therefore distalization is not straightforward to measure. The development of a reliable and automatic measure of distalization using motion tracking technology is of special interest in many fields of sign language research.
%U https://aclanthology.org/2022.signlang-1.29
%P 187-191
Markdown (Informal)
[Capturing Distalization](https://aclanthology.org/2022.signlang-1.29) (Stamp et al., SignLang 2022)
ACL
- Rose Stamp, Lilyana Khatib, and Hagit Hel-Or. 2022. Capturing Distalization. In Proceedings of the LREC2022 10th Workshop on the Representation and Processing of Sign Languages: Multilingual Sign Language Resources, pages 187–191, Marseille, France. European Language Resources Association.