% CoNLL 2023 BabyLM Challenge paper (ACL Anthology: 2023.conll-babylm.15).
% Key kept as the original hash so existing \cite commands still resolve.
@inproceedings{40a945d2f6574e7e9ba1a26608643d47,
  author    = {Thoma, Lukas and Weyers, Ivonne and {\c{C}}ano, Erion and Schweter, Stefan and Mueller, Jutta L. and Roth, Benjamin},
  title     = {{CogMemLM}: Human-Like Memory Mechanisms Improve Performance and Cognitive Plausibility of {LLMs}},
  booktitle = {The {BabyLM} Challenge at the 27th Conference on Computational Natural Language Learning},
  publisher = {Association for Computational Linguistics (ACL)},
  year      = {2023},
  pages     = {180--185},
  doi       = {10.18653/v1/2023.conll-babylm.15},
  isbn      = {978-1-952148-02-6},
  language  = {English},
  note      = {Funding Information: The present research was funded by the Go!Digital 3.0 grant program of the Austrian Academy of Sciences (GD3.0 2021-18 CogML). We thank our intern student C{\'e}lestin Eve for assisting us in this project. Furthermore, we would like to thank Google{\textquoteright}s TPU Research Cloud (TRC) program for giving us access to TPUs that were used for training our BabyLM models. We would also like to thank Hugging Face for providing the ability to host and perform inferencing of our models on the Hugging Face Model Hub.; BabyLM Challenge at the 27th Conference on Computational Natural Language Learning, CoNLL 2023 ; Conference date: 06-12-2023 Through 07-12-2023},
}