@inproceedings{zhang-etal-2022-quantifying,
    title = "Quantifying Discourse Support for Omitted Pronouns",
    author = "Zhang, Shulin  and
      Li, Jixing  and
      Hale, John",
    editor = "Ogrodniczuk, Maciej  and
      Pradhan, Sameer  and
      Nedoluzhko, Anna  and
      Ng, Vincent  and
      Poesio, Massimo",
    booktitle = "Proceedings of the Fifth Workshop on Computational Models of Reference, Anaphora and Coreference",
    month = oct,
    year = "2022",
    address = "Gyeongju, Republic of Korea",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2022.crac-1.1",
    pages = "1--12",
    abstract = "Pro-drop is commonly seen in many languages, but its discourse motivations have not been well characterized. Inspired by the topic chain theory in Chinese, this study shows how character-verb usage continuity distinguishes dropped pronouns from overt references to story characters. We model the choice to drop vs. not drop as a function of character-verb continuity. The results show that omitted subjects have higher character history-current verb continuity salience than non-omitted subjects. This is consistent with the idea that discourse coherence with a particular topic, such as a story character, indeed facilitates the omission of pronouns in languages and contexts where they are optional.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="zhang-etal-2022-quantifying">
    <titleInfo>
      <title>Quantifying Discourse Support for Omitted Pronouns</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Shulin</namePart>
      <namePart type="family">Zhang</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Jixing</namePart>
      <namePart type="family">Li</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">John</namePart>
      <namePart type="family">Hale</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2022-10</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fifth Workshop on Computational Models of Reference, Anaphora and Coreference</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Maciej</namePart>
        <namePart type="family">Ogrodniczuk</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Sameer</namePart>
        <namePart type="family">Pradhan</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Anna</namePart>
        <namePart type="family">Nedoluzhko</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Vincent</namePart>
        <namePart type="family">Ng</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Massimo</namePart>
        <namePart type="family">Poesio</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Gyeongju, Republic of Korea</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Pro-drop is commonly seen in many languages, but its discourse motivations have not been well characterized. Inspired by the topic chain theory in Chinese, this study shows how character-verb usage continuity distinguishes dropped pronouns from overt references to story characters. We model the choice to drop vs. not drop as a function of character-verb continuity. The results show that omitted subjects have higher character history-current verb continuity salience than non-omitted subjects. This is consistent with the idea that discourse coherence with a particular topic, such as a story character, indeed facilitates the omission of pronouns in languages and contexts where they are optional.</abstract>
    <identifier type="citekey">zhang-etal-2022-quantifying</identifier>
    <location>
      <url>https://aclanthology.org/2022.crac-1.1</url>
    </location>
    <part>
      <date>2022-10</date>
      <extent unit="page">
        <start>1</start>
        <end>12</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Quantifying Discourse Support for Omitted Pronouns
%A Zhang, Shulin
%A Li, Jixing
%A Hale, John
%Y Ogrodniczuk, Maciej
%Y Pradhan, Sameer
%Y Nedoluzhko, Anna
%Y Ng, Vincent
%Y Poesio, Massimo
%S Proceedings of the Fifth Workshop on Computational Models of Reference, Anaphora and Coreference
%D 2022
%8 October
%I Association for Computational Linguistics
%C Gyeongju, Republic of Korea
%F zhang-etal-2022-quantifying
%X Pro-drop is commonly seen in many languages, but its discourse motivations have not been well characterized. Inspired by the topic chain theory in Chinese, this study shows how character-verb usage continuity distinguishes dropped pronouns from overt references to story characters. We model the choice to drop vs. not drop as a function of character-verb continuity. The results show that omitted subjects have higher character history-current verb continuity salience than non-omitted subjects. This is consistent with the idea that discourse coherence with a particular topic, such as a story character, indeed facilitates the omission of pronouns in languages and contexts where they are optional.
%U https://aclanthology.org/2022.crac-1.1
%P 1-12
Markdown (Informal)
[Quantifying Discourse Support for Omitted Pronouns](https://aclanthology.org/2022.crac-1.1) (Zhang et al., CRAC 2022)
ACL
Shulin Zhang, Jixing Li, and John Hale. 2022. Quantifying Discourse Support for Omitted Pronouns. In Proceedings of the Fifth Workshop on Computational Models of Reference, Anaphora and Coreference, pages 1–12, Gyeongju, Republic of Korea. Association for Computational Linguistics.