@inproceedings{ek-etal-2020-punctuation,
title = "How does Punctuation Affect Neural Models in Natural Language Inference",
author = "Ek, Adam and
Bernardy, Jean-Philippe and
Chatzikyriakidis, Stergios",
booktitle = "Proceedings of the Probability and Meaning Conference (PaM 2020)",
month = jun,
year = "2020",
address = "Gothenburg",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.pam-1.15",
pages = "109--116",
abstract = "Natural Language Inference models have reached almost human-level performance but their generalisation capabilities have not yet been fully characterized. In particular, sensitivity to small changes in the data is a current area of investigation. In this paper, we focus on the effect of punctuation on such models. Our findings can be broadly summarized as follows: (1) irrelevant changes in punctuation are correctly ignored by the recent transformer models (BERT) while older RNN-based models were sensitive to them. (2) All models, both transformers and RNN-based models, are incapable of taking into account small relevant changes in the punctuation.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="ek-etal-2020-punctuation">
<titleInfo>
<title>How does Punctuation Affect Neural Models in Natural Language Inference</title>
</titleInfo>
<name type="personal">
<namePart type="given">Adam</namePart>
<namePart type="family">Ek</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jean-Philippe</namePart>
<namePart type="family">Bernardy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Stergios</namePart>
<namePart type="family">Chatzikyriakidis</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Probability and Meaning Conference (PaM 2020)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Gothenburg</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Natural Language Inference models have reached almost human-level performance but their generalisation capabilities have not yet been fully characterized. In particular, sensitivity to small changes in the data is a current area of investigation. In this paper, we focus on the effect of punctuation on such models. Our findings can be broadly summarized as follows: (1) irrelevant changes in punctuation are correctly ignored by the recent transformer models (BERT) while older RNN-based models were sensitive to them. (2) All models, both transformers and RNN-based models, are incapable of taking into account small relevant changes in the punctuation.</abstract>
<identifier type="citekey">ek-etal-2020-punctuation</identifier>
<location>
<url>https://aclanthology.org/2020.pam-1.15</url>
</location>
<part>
<date>2020-06</date>
<extent unit="page">
<start>109</start>
<end>116</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T How does Punctuation Affect Neural Models in Natural Language Inference
%A Ek, Adam
%A Bernardy, Jean-Philippe
%A Chatzikyriakidis, Stergios
%S Proceedings of the Probability and Meaning Conference (PaM 2020)
%D 2020
%8 June
%I Association for Computational Linguistics
%C Gothenburg
%F ek-etal-2020-punctuation
%X Natural Language Inference models have reached almost human-level performance but their generalisation capabilities have not yet been fully characterized. In particular, sensitivity to small changes in the data is a current area of investigation. In this paper, we focus on the effect of punctuation on such models. Our findings can be broadly summarized as follows: (1) irrelevant changes in punctuation are correctly ignored by the recent transformer models (BERT) while older RNN-based models were sensitive to them. (2) All models, both transformers and RNN-based models, are incapable of taking into account small relevant changes in the punctuation.
%U https://aclanthology.org/2020.pam-1.15
%P 109-116
Markdown (Informal)
[How does Punctuation Affect Neural Models in Natural Language Inference](https://aclanthology.org/2020.pam-1.15) (Ek et al., PaM 2020)
ACL