@article{alemany-puig-ferrer-i-cancho-2022-linear,
title = "Linear-Time Calculation of the Expected Sum of Edge Lengths in Random Projective Linearizations of Trees",
author = "Alemany-Puig, Llu{\'\i}s and
Ferrer-i-Cancho, Ramon",
journal = "Computational Linguistics",
volume = "48",
number = "3",
month = sep,
year = "2022",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://aclanthology.org/2022.cl-3.1",
doi = "10.1162/coli_a_00442",
pages = "491--516",
abstract = "The syntactic structure of a sentence is often represented using syntactic dependency trees. The sum of the distances between syntactically related words has been in the limelight for the past decades. Research on dependency distances led to the formulation of the principle of dependency distance minimization whereby words in sentences are ordered so as to minimize that sum. Numerous random baselines have been defined to carry out related quantitative studies on lan- guages. The simplest random baseline is the expected value of the sum in unconstrained random permutations of the words in the sentence, namely, when all the shufflings of the words of a sentence are allowed and equally likely. Here we focus on a popular baseline: random projective per- mutations of the words of the sentence, that is, permutations where the syntactic dependency structure is projective, a formal constraint that sentences satisfy often in languages. Thus far, the expectation of the sum of dependency distances in random projective shufflings of a sentence has been estimated approximately with a Monte Carlo procedure whose cost is of the order of Rn, where n is the number of words of the sentence and R is the number of samples; it is well known that the larger R is, the lower the error of the estimation but the larger the time cost. Here we pre- sent formulae to compute that expectation without error in time of the order of n. Furthermore, we show that star trees maximize it, and provide an algorithm to retrieve the trees that minimize it.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="alemany-puig-ferrer-i-cancho-2022-linear">
<titleInfo>
<title>Linear-Time Calculation of the Expected Sum of Edge Lengths in Random Projective Linearizations of Trees</title>
</titleInfo>
<name type="personal">
<namePart type="given">Lluís</namePart>
<namePart type="family">Alemany-Puig</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ramon</namePart>
<namePart type="family">Ferrer-i-Cancho</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<genre authority="bibutilsgt">journal article</genre>
<relatedItem type="host">
<titleInfo>
<title>Computational Linguistics</title>
</titleInfo>
<originInfo>
<issuance>continuing</issuance>
<publisher>MIT Press</publisher>
<place>
<placeTerm type="text">Cambridge, MA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">periodical</genre>
<genre authority="bibutilsgt">academic journal</genre>
</relatedItem>
<abstract>The syntactic structure of a sentence is often represented using syntactic dependency trees. The sum of the distances between syntactically related words has been in the limelight for the past decades. Research on dependency distances led to the formulation of the principle of dependency distance minimization whereby words in sentences are ordered so as to minimize that sum. Numerous random baselines have been defined to carry out related quantitative studies on languages. The simplest random baseline is the expected value of the sum in unconstrained random permutations of the words in the sentence, namely, when all the shufflings of the words of a sentence are allowed and equally likely. Here we focus on a popular baseline: random projective permutations of the words of the sentence, that is, permutations where the syntactic dependency structure is projective, a formal constraint that sentences satisfy often in languages. Thus far, the expectation of the sum of dependency distances in random projective shufflings of a sentence has been estimated approximately with a Monte Carlo procedure whose cost is of the order of Rn, where n is the number of words of the sentence and R is the number of samples; it is well known that the larger R is, the lower the error of the estimation but the larger the time cost. Here we present formulae to compute that expectation without error in time of the order of n. Furthermore, we show that star trees maximize it, and provide an algorithm to retrieve the trees that minimize it.</abstract>
<identifier type="citekey">alemany-puig-ferrer-i-cancho-2022-linear</identifier>
<identifier type="doi">10.1162/coli_a_00442</identifier>
<location>
<url>https://aclanthology.org/2022.cl-3.1</url>
</location>
<part>
<date>2022-09</date>
<detail type="volume"><number>48</number></detail>
<detail type="issue"><number>3</number></detail>
<extent unit="page">
<start>491</start>
<end>516</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Journal Article
%T Linear-Time Calculation of the Expected Sum of Edge Lengths in Random Projective Linearizations of Trees
%A Alemany-Puig, Lluís
%A Ferrer-i-Cancho, Ramon
%J Computational Linguistics
%D 2022
%8 September
%V 48
%N 3
%I MIT Press
%C Cambridge, MA
%F alemany-puig-ferrer-i-cancho-2022-linear
%X The syntactic structure of a sentence is often represented using syntactic dependency trees. The sum of the distances between syntactically related words has been in the limelight for the past decades. Research on dependency distances led to the formulation of the principle of dependency distance minimization whereby words in sentences are ordered so as to minimize that sum. Numerous random baselines have been defined to carry out related quantitative studies on languages. The simplest random baseline is the expected value of the sum in unconstrained random permutations of the words in the sentence, namely, when all the shufflings of the words of a sentence are allowed and equally likely. Here we focus on a popular baseline: random projective permutations of the words of the sentence, that is, permutations where the syntactic dependency structure is projective, a formal constraint that sentences satisfy often in languages. Thus far, the expectation of the sum of dependency distances in random projective shufflings of a sentence has been estimated approximately with a Monte Carlo procedure whose cost is of the order of Rn, where n is the number of words of the sentence and R is the number of samples; it is well known that the larger R is, the lower the error of the estimation but the larger the time cost. Here we present formulae to compute that expectation without error in time of the order of n. Furthermore, we show that star trees maximize it, and provide an algorithm to retrieve the trees that minimize it.
%R 10.1162/coli_a_00442
%U https://aclanthology.org/2022.cl-3.1
%U https://doi.org/10.1162/coli_a_00442
%P 491-516
Markdown (Informal)
[Linear-Time Calculation of the Expected Sum of Edge Lengths in Random Projective Linearizations of Trees](https://aclanthology.org/2022.cl-3.1) (Alemany-Puig & Ferrer-i-Cancho, CL 2022)
ACL
Lluís Alemany-Puig and Ramon Ferrer-i-Cancho. 2022. Linear-Time Calculation of the Expected Sum of Edge Lengths in Random Projective Linearizations of Trees. Computational Linguistics, 48(3):491–516.
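
The abstract contrasts the Monte Carlo baseline, whose cost grows as Rn, with the paper's exact linear-time formulae. As a rough illustration only (not the authors' code), the sketch below estimates the expected sum of edge lengths D by Monte Carlo, assuming the usual recursive construction of random projective orderings in which each head is shuffled together with the contiguous blocks formed by its dependents' subtrees. All function names and the toy star tree are hypothetical.

```python
import random

def random_projective_linearization(children, root):
    """Random projective ordering: each vertex is shuffled together with
    the contiguous blocks of its subtrees, recursively (an assumption of
    this sketch, not the paper's algorithm)."""
    def arrange(v):
        blocks = [[v]] + [arrange(c) for c in children.get(v, [])]
        random.shuffle(blocks)
        return [u for block in blocks for u in block]
    return arrange(root)

def sum_of_edge_lengths(order, edges):
    """D = sum of |position(u) - position(v)| over all edges (u, v)."""
    pos = {v: i for i, v in enumerate(order)}
    return sum(abs(pos[u] - pos[v]) for u, v in edges)

def monte_carlo_expected_D(children, root, edges, R=10_000):
    """O(R * n) Monte Carlo estimate of E[D] over random projective
    linearizations -- the kind of baseline the paper replaces with an
    exact O(n) calculation."""
    total = 0
    for _ in range(R):
        order = random_projective_linearization(children, root)
        total += sum_of_edge_lengths(order, edges)
    return total / R

# Toy example: a star tree rooted at 0 with leaves 1..4
# (the abstract notes that star trees maximize the expectation).
children = {0: [1, 2, 3, 4]}
edges = [(0, k) for k in (1, 2, 3, 4)]
print(monte_carlo_expected_D(children, root=0, edges=edges))  # close to 8
```

For this 5-vertex star every permutation is projective, so the estimate converges to 4 times the expected distance between two distinct random positions, (5+1)/3 = 2 per edge, i.e. about 8; the paper's formulae give such values exactly without sampling.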