BibTeX record conf/emnlp/HassidPRKMS022


@inproceedings{DBLP:conf/emnlp/HassidPRKMS022,
  author       = {Michael Hassid and
                  Hao Peng and
                  Daniel Rotem and
                  Jungo Kasai and
                  Ivan Montero and
                  Noah A. Smith and
                  Roy Schwartz},
  editor       = {Yoav Goldberg and
                  Zornitsa Kozareva and
                  Yue Zhang},
  title        = {How Much Does Attention Actually Attend? Questioning the Importance
                  of Attention in Pretrained Transformers},
  booktitle    = {Findings of the Association for Computational Linguistics: {EMNLP}
                  2022, Abu Dhabi, United Arab Emirates, December 7-11, 2022},
  pages        = {1403--1416},
  publisher    = {Association for Computational Linguistics},
  year         = {2022},
  url          = {https://doi.org/10.18653/v1/2022.findings-emnlp.101},
  doi          = {10.18653/v1/2022.findings-emnlp.101},
  timestamp    = {Wed, 21 Feb 2024 11:48:05 +0100},
  biburl       = {https://dblp.org/rec/conf/emnlp/HassidPRKMS022.bib},
  bibsource    = {dblp computer science bibliography, https://dblp.org}
}
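
To use this record, save it to a .bib file and cite it by its key. A minimal sketch of a LaTeX document that does so, assuming the record is saved as references.bib (filename chosen for illustration):

  \documentclass{article}
  \begin{document}
  % Cite the entry by its DBLP key
  Hassid et al.~\cite{DBLP:conf/emnlp/HassidPRKMS022} question how much
  attention pretrained transformers actually attend.
  \bibliographystyle{plain}
  \bibliography{references}  % expects references.bib containing the record above
  \end{document}

Compiling with pdflatex, then bibtex, then pdflatex twice resolves the citation; biblatex/biber setups accept the same entry unchanged.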