@inproceedings{tiesen-lishuang-2022-improving,
title = "Improving Event Temporal Relation Classification via Auxiliary Label-Aware Contrastive Learning",
author = "Tiesen, Sun and
Lishuang, Li",
booktitle = "Proceedings of the 21st Chinese National Conference on Computational Linguistics",
month = oct,
year = "2022",
address = "Nanchang, China",
publisher = "Chinese Information Processing Society of China",
url = "https://aclanthology.org/2022.ccl-1.76",
pages = "861--871",
abstract = "Event Temporal Relation Classification (ETRC) is crucial to natural language understanding. In recent years, mainstream ETRC methods have not taken advantage of the rich semantic information contained in gold temporal relation labels, which is lost in discrete one-hot encodings. To alleviate this loss of semantic information, we propose learning the Temporal semantic information of the gold labels by Auxiliary Contrastive Learning (TempACL). Unlike traditional contrastive learning methods, which further train the PreTrained Language Model (PTLM) in an unsupervised setting before fine-tuning on target tasks, we design a supervised contrastive learning framework and make three improvements. First, we design a new data augmentation method that generates augmented data by matching templates we establish with gold labels. Second, we propose patient contrastive learning and design three patient strategies. Third, we design a label-aware contrastive learning loss function. Extensive experimental results show that TempACL effectively adapts contrastive learning to supervised learning tasks, which remains a challenge in practice. TempACL achieves new state-of-the-art results on TB-Dense and MATRES, outperforming the baseline model by up to 5.37{\%} F1 on TB-Dense and 1.81{\%} F1 on MATRES.",
language = "English",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="tiesen-lishuang-2022-improving">
<titleInfo>
<title>Improving Event Temporal Relation Classification via Auxiliary Label-Aware Contrastive Learning</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sun</namePart>
<namePart type="family">Tiesen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Li</namePart>
<namePart type="family">Lishuang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<language>
<languageTerm type="text">English</languageTerm>
<languageTerm type="code" authority="iso639-2b">eng</languageTerm>
</language>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 21st Chinese National Conference on Computational Linguistics</title>
</titleInfo>
<originInfo>
<publisher>Chinese Information Processing Society of China</publisher>
<place>
<placeTerm type="text">Nanchang, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Event Temporal Relation Classification (ETRC) is crucial to natural language understanding. In recent years, mainstream ETRC methods have not taken advantage of the rich semantic information contained in gold temporal relation labels, which is lost in discrete one-hot encodings. To alleviate this loss of semantic information, we propose learning the Temporal semantic information of the gold labels by Auxiliary Contrastive Learning (TempACL). Unlike traditional contrastive learning methods, which further train the PreTrained Language Model (PTLM) in an unsupervised setting before fine-tuning on target tasks, we design a supervised contrastive learning framework and make three improvements. First, we design a new data augmentation method that generates augmented data by matching templates we establish with gold labels. Second, we propose patient contrastive learning and design three patient strategies. Third, we design a label-aware contrastive learning loss function. Extensive experimental results show that TempACL effectively adapts contrastive learning to supervised learning tasks, which remains a challenge in practice. TempACL achieves new state-of-the-art results on TB-Dense and MATRES, outperforming the baseline model by up to 5.37% F1 on TB-Dense and 1.81% F1 on MATRES.</abstract>
<identifier type="citekey">tiesen-lishuang-2022-improving</identifier>
<location>
<url>https://aclanthology.org/2022.ccl-1.76</url>
</location>
<part>
<date>2022-10</date>
<extent unit="page">
<start>861</start>
<end>871</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Improving Event Temporal Relation Classification via Auxiliary Label-Aware Contrastive Learning
%A Sun, Tiesen
%A Li, Lishuang
%S Proceedings of the 21st Chinese National Conference on Computational Linguistics
%D 2022
%8 October
%I Chinese Information Processing Society of China
%C Nanchang, China
%G English
%F tiesen-lishuang-2022-improving
%X Event Temporal Relation Classification (ETRC) is crucial to natural language understanding. In recent years, mainstream ETRC methods have not taken advantage of the rich semantic information contained in gold temporal relation labels, which is lost in discrete one-hot encodings. To alleviate this loss of semantic information, we propose learning the Temporal semantic information of the gold labels by Auxiliary Contrastive Learning (TempACL). Unlike traditional contrastive learning methods, which further train the PreTrained Language Model (PTLM) in an unsupervised setting before fine-tuning on target tasks, we design a supervised contrastive learning framework and make three improvements. First, we design a new data augmentation method that generates augmented data by matching templates we establish with gold labels. Second, we propose patient contrastive learning and design three patient strategies. Third, we design a label-aware contrastive learning loss function. Extensive experimental results show that TempACL effectively adapts contrastive learning to supervised learning tasks, which remains a challenge in practice. TempACL achieves new state-of-the-art results on TB-Dense and MATRES, outperforming the baseline model by up to 5.37% F1 on TB-Dense and 1.81% F1 on MATRES.
%U https://aclanthology.org/2022.ccl-1.76
%P 861-871
Markdown (Informal)
[Improving Event Temporal Relation Classification via Auxiliary Label-Aware Contrastive Learning](https://aclanthology.org/2022.ccl-1.76) (Sun & Li, CCL 2022)
ACL
Tiesen Sun and Lishuang Li. 2022. Improving Event Temporal Relation Classification via Auxiliary Label-Aware Contrastive Learning. In Proceedings of the 21st Chinese National Conference on Computational Linguistics, pages 861–871, Nanchang, China. Chinese Information Processing Society of China.