From 31548d2df1ac2c2eeee7c1a9181009ce3984b12d Mon Sep 17 00:00:00 2001
From: Simrat Hanspal <15978688+simrathanspal@users.noreply.github.com>
Date: Fri, 6 Sep 2024 21:25:18 +0530
Subject: [PATCH] is_flash_attn_available has been renamed in
 transformers.utils

Updated is_flash_attn_available to is_flash_attn_2_available

transformers.utils - https://github.com/huggingface/transformers/blob/main/src/transformers/utils/__init__.py
PR - https://github.com/huggingface/transformers/pull/26785
---
 medusa/model/modeling_llama_kv.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/medusa/model/modeling_llama_kv.py b/medusa/model/modeling_llama_kv.py
index abf9382..3d61719 100644
--- a/medusa/model/modeling_llama_kv.py
+++ b/medusa/model/modeling_llama_kv.py
@@ -22,7 +22,7 @@
 from transformers.utils import (
     add_start_docstrings,
     add_start_docstrings_to_model_forward,
-    is_flash_attn_available,
+    is_flash_attn_2_available,
     logging,
     replace_return_docstrings,
 )
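
For downstream code that has to run against both older and newer transformers releases, a compatibility shim can bridge the rename. The try/except guard below is only a sketch under that assumption; it relies on the fact that older releases still export the pre-rename helper is_flash_attn_available from transformers.utils.

    # Sketch of a version-agnostic import guard: prefer the renamed helper,
    # fall back to the old name on transformers releases that predate PR #26785.
    try:
        from transformers.utils import is_flash_attn_2_available
    except ImportError:
        # Older transformers: the helper was still called is_flash_attn_available.
        from transformers.utils import is_flash_attn_available as is_flash_attn_2_available

    if is_flash_attn_2_available():
        print("FlashAttention 2 kernels are available")
    else:
        print("Falling back to the standard attention implementation")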