@inproceedings{xiong-etal-2023-enhancing,
title = "Enhancing Ontology Knowledge for Domain-Specific Joint Entity and Relation Extraction",
author = "Xiong, Xiong and
Wang, Chen and
Liu, Yunfei and
Li, Shengyang",
editor = "Sun, Maosong and
Qin, Bing and
Qiu, Xipeng and
Jiang, Jing and
Han, Xianpei",
booktitle = "Proceedings of the 22nd Chinese National Conference on Computational Linguistics",
month = aug,
year = "2023",
address = "Harbin, China",
publisher = "Chinese Information Processing Society of China",
url = "https://aclanthology.org/2023.ccl-1.61",
pages = "713--725",
abstract = "Pre-trained language models (PLMs) have been widely used in entity and relation extraction methods in recent years. However, due to the semantic gap between general-domain text used for pre-training and domain-specific text, these methods encounter semantic redundancy and domain semantics insufficiency when it comes to domain-specific tasks. To mitigate this issue, we propose a low-cost and effective knowledge-enhanced method to facilitate domain-specific semantics modeling in joint entity and relation extraction. Precisely, we use ontology and entity type descriptions as domain knowledge sources, which are encoded and incorporated into the downstream entity and relation extraction model to improve its understanding of domain-specific information. We construct a dataset called SSUIE-RE for Chinese entity and relation extraction in space science and utilization domain of China Manned Space Engineering, which contains a wealth of domain-specific knowledge. The experimental results on SSUIE-RE demonstrate the effectiveness of our method, achieving a 1.4{\%} absolute improvement in relation F1 score over previous best approach.",
language = "English",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="xiong-etal-2023-enhancing">
<titleInfo>
<title>Enhancing Ontology Knowledge for Domain-Specific Joint Entity and Relation Extraction</title>
</titleInfo>
<name type="personal">
<namePart type="given">Xiong</namePart>
<namePart type="family">Xiong</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Wang</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Liu</namePart>
<namePart type="family">Yunfei</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Li</namePart>
<namePart type="family">Shengyang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<language>
<languageTerm type="text">English</languageTerm>
<languageTerm type="code" authority="iso639-2b">eng</languageTerm>
</language>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 22nd Chinese National Conference on Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Maosong</namePart>
<namePart type="family">Sun</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Bing</namePart>
<namePart type="family">Qin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xipeng</namePart>
<namePart type="family">Qiu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jing</namePart>
<namePart type="family">Jiang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xianpei</namePart>
<namePart type="family">Han</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Chinese Information Processing Society of China</publisher>
<place>
<placeTerm type="text">Harbin, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Pre-trained language models (PLMs) have been widely used in entity and relation extraction methods in recent years. However, due to the semantic gap between general-domain text used for pre-training and domain-specific text, these methods encounter semantic redundancy and domain semantics insufficiency when it comes to domain-specific tasks. To mitigate this issue, we propose a low-cost and effective knowledge-enhanced method to facilitate domain-specific semantics modeling in joint entity and relation extraction. Precisely, we use ontology and entity type descriptions as domain knowledge sources, which are encoded and incorporated into the downstream entity and relation extraction model to improve its understanding of domain-specific information. We construct a dataset called SSUIE-RE for Chinese entity and relation extraction in space science and utilization domain of China Manned Space Engineering, which contains a wealth of domain-specific knowledge. The experimental results on SSUIE-RE demonstrate the effectiveness of our method, achieving a 1.4% absolute improvement in relation F1 score over previous best approach.</abstract>
<identifier type="citekey">xiong-etal-2023-enhancing</identifier>
<location>
<url>https://aclanthology.org/2023.ccl-1.61</url>
</location>
<part>
<date>2023-08</date>
<extent unit="page">
<start>713</start>
<end>725</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Enhancing Ontology Knowledge for Domain-Specific Joint Entity and Relation Extraction
%A Xiong, Xiong
%A Wang, Chen
%A Liu, Yunfei
%A Li, Shengyang
%Y Sun, Maosong
%Y Qin, Bing
%Y Qiu, Xipeng
%Y Jiang, Jing
%Y Han, Xianpei
%S Proceedings of the 22nd Chinese National Conference on Computational Linguistics
%D 2023
%8 August
%I Chinese Information Processing Society of China
%C Harbin, China
%G English
%F xiong-etal-2023-enhancing
%X Pre-trained language models (PLMs) have been widely used in entity and relation extraction methods in recent years. However, due to the semantic gap between general-domain text used for pre-training and domain-specific text, these methods encounter semantic redundancy and domain semantics insufficiency when it comes to domain-specific tasks. To mitigate this issue, we propose a low-cost and effective knowledge-enhanced method to facilitate domain-specific semantics modeling in joint entity and relation extraction. Precisely, we use ontology and entity type descriptions as domain knowledge sources, which are encoded and incorporated into the downstream entity and relation extraction model to improve its understanding of domain-specific information. We construct a dataset called SSUIE-RE for Chinese entity and relation extraction in space science and utilization domain of China Manned Space Engineering, which contains a wealth of domain-specific knowledge. The experimental results on SSUIE-RE demonstrate the effectiveness of our method, achieving a 1.4% absolute improvement in relation F1 score over previous best approach.
%U https://aclanthology.org/2023.ccl-1.61
%P 713-725
Markdown (Informal)
[Enhancing Ontology Knowledge for Domain-Specific Joint Entity and Relation Extraction](https://aclanthology.org/2023.ccl-1.61) (Xiong et al., CCL 2023)
ACL
Xiong Xiong, Chen Wang, Yunfei Liu, and Shengyang Li. 2023. Enhancing Ontology Knowledge for Domain-Specific Joint Entity and Relation Extraction. In Proceedings of the 22nd Chinese National Conference on Computational Linguistics, pages 713–725, Harbin, China. Chinese Information Processing Society of China.