@inproceedings{sainz-rigau-2021-ask2transformers,
  title     = {{Ask2Transformers}: Zero-Shot Domain labelling with Pretrained Language Models},
  author    = {Sainz, Oscar and
               Rigau, German},
  editor    = {Vossen, Piek and
               Fellbaum, Christiane},
  booktitle = {Proceedings of the 11th Global Wordnet Conference},
  month     = jan,
  year      = {2021},
  address   = {University of South Africa (UNISA)},
  publisher = {Global Wordnet Association},
  url       = {https://aclanthology.org/2021.gwc-1.6/},
  pages     = {44--52},
  abstract  = {In this paper we present a system that exploits different pre-trained Language Models for assigning domain labels to WordNet synsets without any kind of supervision. Furthermore, the system is not restricted to use a particular set of domain labels. We exploit the knowledge encoded within different off-the-shelf pre-trained Language Models and task formulations to infer the domain label of a particular WordNet definition. The proposed zero-shot system achieves a new state-of-the-art on the English dataset used in the evaluation.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="sainz-rigau-2021-ask2transformers">
<titleInfo>
<title>Ask2Transformers: Zero-Shot Domain labelling with Pretrained Language Models</title>
</titleInfo>
<name type="personal">
<namePart type="given">Oscar</namePart>
<namePart type="family">Sainz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">German</namePart>
<namePart type="family">Rigau</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-01</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 11th Global Wordnet Conference</title>
</titleInfo>
<name type="personal">
<namePart type="given">Piek</namePart>
<namePart type="family">Vossen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Christiane</namePart>
<namePart type="family">Fellbaum</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Global Wordnet Association</publisher>
<place>
<placeTerm type="text">University of South Africa (UNISA)</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>In this paper we present a system that exploits different pre-trained Language Models for assigning domain labels to WordNet synsets without any kind of supervision. Furthermore, the system is not restricted to use a particular set of domain labels. We exploit the knowledge encoded within different off-the-shelf pre-trained Language Models and task formulations to infer the domain label of a particular WordNet definition. The proposed zero-shot system achieves a new state-of-the-art on the English dataset used in the evaluation.</abstract>
<identifier type="citekey">sainz-rigau-2021-ask2transformers</identifier>
<location>
<url>https://aclanthology.org/2021.gwc-1.6/</url>
</location>
<part>
<date>2021-01</date>
<extent unit="page">
<start>44</start>
<end>52</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Ask2Transformers: Zero-Shot Domain labelling with Pretrained Language Models
%A Sainz, Oscar
%A Rigau, German
%Y Vossen, Piek
%Y Fellbaum, Christiane
%S Proceedings of the 11th Global Wordnet Conference
%D 2021
%8 January
%I Global Wordnet Association
%C University of South Africa (UNISA)
%F sainz-rigau-2021-ask2transformers
%X In this paper we present a system that exploits different pre-trained Language Models for assigning domain labels to WordNet synsets without any kind of supervision. Furthermore, the system is not restricted to use a particular set of domain labels. We exploit the knowledge encoded within different off-the-shelf pre-trained Language Models and task formulations to infer the domain label of a particular WordNet definition. The proposed zero-shot system achieves a new state-of-the-art on the English dataset used in the evaluation.
%U https://aclanthology.org/2021.gwc-1.6/
%P 44-52
Markdown (Informal)
[Ask2Transformers: Zero-Shot Domain labelling with Pretrained Language Models](https://aclanthology.org/2021.gwc-1.6/) (Sainz & Rigau, GWC 2021)
ACL