@inproceedings{zhang-etal-2020-semantic,
title = "Semantic-aware {C}hinese Zero Pronoun Resolution with Pre-trained Semantic Dependency Parser",
author = "Zhang, Lanqiu and
Shen, Zizhuo and
Shao, Yanqiu",
editor = "Sun, Maosong and
Li, Sujian and
Zhang, Yue and
Liu, Yang",
booktitle = "Proceedings of the 19th Chinese National Conference on Computational Linguistics",
month = oct,
year = "2020",
address = "Haikou, China",
publisher = "Chinese Information Processing Society of China",
url = "https://aclanthology.org/2020.ccl-1.77",
pages = "831--841",
abstract = "Deep learning-based Chinese zero pronoun resolution model has achieved better performance than traditional machine learning-based model. However, the existing work related to Chinese zero pronoun resolution has not yet well integrated linguistic information into the deep learningbased Chinese zero pronoun resolution model. This paper adopts the idea based on the pre-trained model, and integrates the semantic representations in the pre-trained Chinese semantic dependency graph parser into the Chinese zero pronoun resolution model. The experimental results on OntoNotes-5.0 dataset show that our proposed Chinese zero pronoun resolution model with pretrained Chinese semantic dependency parser improves the F-score by 0.4{\%} compared with our baseline model, and obtains better results than other deep learning-based Chinese zero pronoun resolution models. In addition, we integrate the BERT representations into our model so that the performance of our model was improved by 0.7{\%} compared with our baseline model.",
language = "English",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="zhang-etal-2020-semantic">
<titleInfo>
<title>Semantic-aware Chinese Zero Pronoun Resolution with Pre-trained Semantic Dependency Parser</title>
</titleInfo>
<name type="personal">
<namePart type="given">Lanqiu</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zizhuo</namePart>
<namePart type="family">Shen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yanqiu</namePart>
<namePart type="family">Shao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<language>
<languageTerm type="text">English</languageTerm>
<languageTerm type="code" authority="iso639-2b">eng</languageTerm>
</language>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 19th Chinese National Conference on Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Maosong</namePart>
<namePart type="family">Sun</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sujian</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yue</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Chinese Information Processing Society of China</publisher>
<place>
<placeTerm type="text">Haikou, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Deep learning-based Chinese zero pronoun resolution models have achieved better performance than traditional machine learning-based models. However, existing work on Chinese zero pronoun resolution has not yet fully integrated linguistic information into deep learning-based models. This paper adopts a pre-training-based approach and integrates the semantic representations from a pre-trained Chinese semantic dependency graph parser into the Chinese zero pronoun resolution model. Experimental results on the OntoNotes-5.0 dataset show that our proposed Chinese zero pronoun resolution model with a pre-trained Chinese semantic dependency parser improves the F-score by 0.4% compared with our baseline model and obtains better results than other deep learning-based Chinese zero pronoun resolution models. In addition, we integrate BERT representations into our model, improving its performance by 0.7% compared with our baseline model.</abstract>
<identifier type="citekey">zhang-etal-2020-semantic</identifier>
<location>
<url>https://aclanthology.org/2020.ccl-1.77</url>
</location>
<part>
<date>2020-10</date>
<extent unit="page">
<start>831</start>
<end>841</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Semantic-aware Chinese Zero Pronoun Resolution with Pre-trained Semantic Dependency Parser
%A Zhang, Lanqiu
%A Shen, Zizhuo
%A Shao, Yanqiu
%Y Sun, Maosong
%Y Li, Sujian
%Y Zhang, Yue
%Y Liu, Yang
%S Proceedings of the 19th Chinese National Conference on Computational Linguistics
%D 2020
%8 October
%I Chinese Information Processing Society of China
%C Haikou, China
%G English
%F zhang-etal-2020-semantic
%X Deep learning-based Chinese zero pronoun resolution models have achieved better performance than traditional machine learning-based models. However, existing work on Chinese zero pronoun resolution has not yet fully integrated linguistic information into deep learning-based models. This paper adopts a pre-training-based approach and integrates the semantic representations from a pre-trained Chinese semantic dependency graph parser into the Chinese zero pronoun resolution model. Experimental results on the OntoNotes-5.0 dataset show that our proposed Chinese zero pronoun resolution model with a pre-trained Chinese semantic dependency parser improves the F-score by 0.4% compared with our baseline model and obtains better results than other deep learning-based Chinese zero pronoun resolution models. In addition, we integrate BERT representations into our model, improving its performance by 0.7% compared with our baseline model.
%U https://aclanthology.org/2020.ccl-1.77
%P 831-841
Markdown (Informal)

[Semantic-aware Chinese Zero Pronoun Resolution with Pre-trained Semantic Dependency Parser](https://aclanthology.org/2020.ccl-1.77) (Zhang et al., CCL 2020)

ACL

Lanqiu Zhang, Zizhuo Shen, and Yanqiu Shao. 2020. [Semantic-aware Chinese Zero Pronoun Resolution with Pre-trained Semantic Dependency Parser](https://aclanthology.org/2020.ccl-1.77). In *Proceedings of the 19th Chinese National Conference on Computational Linguistics*, pages 831–841, Haikou, China. Chinese Information Processing Society of China.