@inproceedings{bensemann-etal-2022-eye,
  title     = {Eye Gaze and Self-attention: How Humans and Transformers Attend Words in Sentences},
  author    = {Bensemann, Joshua and Peng, Alex and Benavides-Prado, Diana and Chen, Yang and Tan, Neset and Corballis, Paul Michael and Riddle, Patricia and Witbrock, Michael},
  editor    = {Chersoni, Emmanuele and Hollenstein, Nora and Jacobs, Cassandra and Oseki, Yohei and Pr{\'e}vot, Laurent and Santus, Enrico},
  booktitle = {Proceedings of the Workshop on Cognitive Modeling and Computational Linguistics},
  month     = may,
  year      = {2022},
  address   = {Dublin, Ireland},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.cmcl-1.9},
  doi       = {10.18653/v1/2022.cmcl-1.9},
  pages     = {75--87},
  abstract  = {Attention describes cognitive processes that are important to many human phenomena including reading. The term is also used to describe the way in which transformer neural networks perform natural language processing. While attention appears to be very different under these two contexts, this paper presents an analysis of the correlations between transformer attention and overt human attention during reading tasks. An extensive analysis of human eye tracking datasets showed that the dwell times of human eye movements were strongly correlated with the attention patterns occurring in the early layers of pre-trained transformers such as BERT. Additionally, the strength of a correlation was not related to the number of parameters within a transformer. This suggests that something about the transformers{'} architecture determined how closely the two measures were correlated.},
}
<?xml version="1.0" encoding="UTF-8"?>
<!-- MODS v3 bibliographic record mirroring the BibTeX entry above; record ID matches the BibTeX citekey. -->
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="bensemann-etal-2022-eye">
<titleInfo>
<title>Eye Gaze and Self-attention: How Humans and Transformers Attend Words in Sentences</title>
</titleInfo>
<!-- Paper authors, one <name> element each; marcrelator role "author". -->
<name type="personal">
<namePart type="given">Joshua</namePart>
<namePart type="family">Bensemann</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alex</namePart>
<namePart type="family">Peng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Diana</namePart>
<namePart type="family">Benavides-Prado</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Chen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Neset</namePart>
<namePart type="family">Tan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<!-- Two <namePart type="given"> elements encode the double given name "Paul Michael". -->
<name type="personal">
<namePart type="given">Paul</namePart>
<namePart type="given">Michael</namePart>
<namePart type="family">Corballis</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Patricia</namePart>
<namePart type="family">Riddle</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Michael</namePart>
<namePart type="family">Witbrock</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-05</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<!-- Host item: the proceedings volume this paper appeared in, with its editors and publisher. -->
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Workshop on Cognitive Modeling and Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Emmanuele</namePart>
<namePart type="family">Chersoni</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nora</namePart>
<namePart type="family">Hollenstein</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Cassandra</namePart>
<namePart type="family">Jacobs</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yohei</namePart>
<namePart type="family">Oseki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Laurent</namePart>
<namePart type="family">Prévot</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Enrico</namePart>
<namePart type="family">Santus</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Dublin, Ireland</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Attention describes cognitive processes that are important to many human phenomena including reading. The term is also used to describe the way in which transformer neural networks perform natural language processing. While attention appears to be very different under these two contexts, this paper presents an analysis of the correlations between transformer attention and overt human attention during reading tasks. An extensive analysis of human eye tracking datasets showed that the dwell times of human eye movements were strongly correlated with the attention patterns occurring in the early layers of pre-trained transformers such as BERT. Additionally, the strength of a correlation was not related to the number of parameters within a transformer. This suggests that something about the transformers’ architecture determined how closely the two measures were correlated.</abstract>
<!-- Identifiers and locator: citekey ties back to the BibTeX entry; DOI stored bare (no resolver prefix). -->
<identifier type="citekey">bensemann-etal-2022-eye</identifier>
<identifier type="doi">10.18653/v1/2022.cmcl-1.9</identifier>
<location>
<url>https://aclanthology.org/2022.cmcl-1.9</url>
</location>
<!-- Issue date and page extent within the host proceedings (pp. 75-87). -->
<part>
<date>2022-05</date>
<extent unit="page">
<start>75</start>
<end>87</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Eye Gaze and Self-attention: How Humans and Transformers Attend Words in Sentences
%A Bensemann, Joshua
%A Peng, Alex
%A Benavides-Prado, Diana
%A Chen, Yang
%A Tan, Neset
%A Corballis, Paul Michael
%A Riddle, Patricia
%A Witbrock, Michael
%Y Chersoni, Emmanuele
%Y Hollenstein, Nora
%Y Jacobs, Cassandra
%Y Oseki, Yohei
%Y Prévot, Laurent
%Y Santus, Enrico
%S Proceedings of the Workshop on Cognitive Modeling and Computational Linguistics
%D 2022
%8 May
%I Association for Computational Linguistics
%C Dublin, Ireland
%F bensemann-etal-2022-eye
%X Attention describes cognitive processes that are important to many human phenomena including reading. The term is also used to describe the way in which transformer neural networks perform natural language processing. While attention appears to be very different under these two contexts, this paper presents an analysis of the correlations between transformer attention and overt human attention during reading tasks. An extensive analysis of human eye tracking datasets showed that the dwell times of human eye movements were strongly correlated with the attention patterns occurring in the early layers of pre-trained transformers such as BERT. Additionally, the strength of a correlation was not related to the number of parameters within a transformer. This suggests that something about the transformers’ architecture determined how closely the two measures were correlated.
%R 10.18653/v1/2022.cmcl-1.9
%U https://aclanthology.org/2022.cmcl-1.9
%U https://doi.org/10.18653/v1/2022.cmcl-1.9
%P 75-87
Markdown (Informal)
[Eye Gaze and Self-attention: How Humans and Transformers Attend Words in Sentences](https://aclanthology.org/2022.cmcl-1.9) (Bensemann et al., CMCL 2022)
ACL
- Joshua Bensemann, Alex Peng, Diana Benavides-Prado, Yang Chen, Neset Tan, Paul Michael Corballis, Patricia Riddle, and Michael Witbrock. 2022. Eye Gaze and Self-attention: How Humans and Transformers Attend Words in Sentences. In Proceedings of the Workshop on Cognitive Modeling and Computational Linguistics, pages 75–87, Dublin, Ireland. Association for Computational Linguistics.