@inproceedings{hassid-etal-2022-much,
  title     = {How Much Does Attention Actually Attend? Questioning the Importance of Attention in Pretrained {Transformers}},
  author    = {Hassid, Michael and Peng, Hao and Rotem, Daniel and Kasai, Jungo and Montero, Ivan and Smith, Noah A. and Schwartz, Roy},
  editor    = {Goldberg, Yoav and Kozareva, Zornitsa and Zhang, Yue},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2022},
  month     = dec,
  year      = {2022},
  address   = {Abu Dhabi, United Arab Emirates},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.findings-emnlp.101/},
  doi       = {10.18653/v1/2022.findings-emnlp.101},
  pages     = {1403--1416},
}