@inproceedings{janz-etal-2021-neural,
    title = "Neural Language Models vs {W}ordnet-based Semantically Enriched Representation in {CST} Relation Recognition",
    author = "Janz, Arkadiusz  and
      Piasecki, Maciej  and
      W{\k{a}}torski, Piotr",
    editor = "Vossen, Piek  and
      Fellbaum, Christiane",
    booktitle = "Proceedings of the 11th Global Wordnet Conference",
    month = jan,
    year = "2021",
    address = "University of South Africa (UNISA)",
    publisher = "Global Wordnet Association",
    url = "https://aclanthology.org/2021.gwc-1.26/",
    pages = "223--233",
    abstract = "Neural language models, including transformer-based models pre-trained on very large corpora, have become a common way to represent text in various tasks, including the recognition of textual semantic relations such as those of Cross-document Structure Theory (CST). Pre-trained models are usually fine-tuned to downstream tasks, and the obtained vectors are used as input to deep neural classifiers; no linguistic knowledge obtained from resources and tools is utilised. In this paper we compare such universal approaches with a combination of a rich, graph-based, linguistically motivated sentence representation and a typical neural network classifier, applied to the task of recognising CST relations in Polish. The representation describes selected levels of sentence structure, including a description of lexical meanings based on wordnet (plWordNet) synsets and the connected SUMO concepts. The obtained results show that, in the case of difficult relations and a medium-sized training corpus, the semantically enriched text representation leads to significantly better results."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="janz-etal-2021-neural">
    <titleInfo>
        <title>Neural Language Models vs Wordnet-based Semantically Enriched Representation in CST Relation Recognition</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Arkadiusz</namePart>
        <namePart type="family">Janz</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Maciej</namePart>
        <namePart type="family">Piasecki</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Piotr</namePart>
        <namePart type="family">Wątorski</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2021-01</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
        <titleInfo>
            <title>Proceedings of the 11th Global Wordnet Conference</title>
        </titleInfo>
        <name type="personal">
            <namePart type="given">Piek</namePart>
            <namePart type="family">Vossen</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Christiane</namePart>
            <namePart type="family">Fellbaum</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <originInfo>
            <publisher>Global Wordnet Association</publisher>
            <place>
                <placeTerm type="text">University of South Africa (UNISA)</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Neural language models, including transformer-based models pre-trained on very large corpora, have become a common way to represent text in various tasks, including the recognition of textual semantic relations such as those of Cross-document Structure Theory (CST). Pre-trained models are usually fine-tuned to downstream tasks, and the obtained vectors are used as input to deep neural classifiers; no linguistic knowledge obtained from resources and tools is utilised. In this paper we compare such universal approaches with a combination of a rich, graph-based, linguistically motivated sentence representation and a typical neural network classifier, applied to the task of recognising CST relations in Polish. The representation describes selected levels of sentence structure, including a description of lexical meanings based on wordnet (plWordNet) synsets and the connected SUMO concepts. The obtained results show that, in the case of difficult relations and a medium-sized training corpus, the semantically enriched text representation leads to significantly better results.</abstract>
    <identifier type="citekey">janz-etal-2021-neural</identifier>
    <location>
        <url>https://aclanthology.org/2021.gwc-1.26/</url>
    </location>
    <part>
        <date>2021-01</date>
        <extent unit="page">
            <start>223</start>
            <end>233</end>
        </extent>
    </part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Neural Language Models vs Wordnet-based Semantically Enriched Representation in CST Relation Recognition
%A Janz, Arkadiusz
%A Piasecki, Maciej
%A Wątorski, Piotr
%Y Vossen, Piek
%Y Fellbaum, Christiane
%S Proceedings of the 11th Global Wordnet Conference
%D 2021
%8 January
%I Global Wordnet Association
%C University of South Africa (UNISA)
%F janz-etal-2021-neural
%X Neural language models, including transformer-based models pre-trained on very large corpora, have become a common way to represent text in various tasks, including the recognition of textual semantic relations such as those of Cross-document Structure Theory (CST). Pre-trained models are usually fine-tuned to downstream tasks, and the obtained vectors are used as input to deep neural classifiers; no linguistic knowledge obtained from resources and tools is utilised. In this paper we compare such universal approaches with a combination of a rich, graph-based, linguistically motivated sentence representation and a typical neural network classifier, applied to the task of recognising CST relations in Polish. The representation describes selected levels of sentence structure, including a description of lexical meanings based on wordnet (plWordNet) synsets and the connected SUMO concepts. The obtained results show that, in the case of difficult relations and a medium-sized training corpus, the semantically enriched text representation leads to significantly better results.
%U https://aclanthology.org/2021.gwc-1.26/
%P 223-233
Markdown (Informal)
[Neural Language Models vs Wordnet-based Semantically Enriched Representation in CST Relation Recognition](https://aclanthology.org/2021.gwc-1.26/) (Janz et al., GWC 2021)
ACL
Arkadiusz Janz, Maciej Piasecki, and Piotr Wątorski. 2021. Neural Language Models vs Wordnet-based Semantically Enriched Representation in CST Relation Recognition. In Proceedings of the 11th Global Wordnet Conference, pages 223–233, University of South Africa (UNISA). Global Wordnet Association.