@inproceedings{wang-etal-2024-better,
    title = "Better Call {SAUL}: Fluent and Consistent Language Model Editing with Generation Regularization",
    author = {Wang, Mingyang and Lange, Lukas and Adel, Heike and Str{\"o}tgen, Jannik and Sch{\"u}tze, Hinrich},
    editor = "Al-Onaizan, Yaser and Bansal, Mohit and Chen, Yun-Nung",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2024",
    month = nov,
    year = "2024",
    address = "Miami, Florida, USA",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2024.findings-emnlp.469",
    doi = "10.18653/v1/2024.findings-emnlp.469",
    pages = "7990--8000",
    abstract = "To ensure large language models contain up-to-date knowledge, they need to be updated regularly. However, model editing is challenging as it might also affect knowledge that is unrelated to the new data. State-of-the-art methods identify parameters associated with specific knowledge and then modify them via direct weight updates. However, these locate-and-edit methods suffer from heavy computational overhead and lack theoretical validation. In contrast, directly fine-tuning the model on requested edits affects the model{'}s behavior on unrelated knowledge, and significantly damages the model{'}s generation fluency and consistency. To address these challenges, we propose SAUL, a streamlined model editing method that uses sentence concatenation with augmented random facts for generation regularization. Evaluations on three model editing benchmarks show that SAUL is a practical and reliable solution for model editing, outperforming state-of-the-art methods while maintaining generation quality and reducing computational overhead.",
}