diff --git a/fish_speech/models/text2semantic/llama.py b/fish_speech/models/text2semantic/llama.py
index 9b423861..0b27e6f3 100644
--- a/fish_speech/models/text2semantic/llama.py
+++ b/fish_speech/models/text2semantic/llama.py
@@ -38,7 +38,7 @@ class ModelArgs:
     codebook_padding_idx: int = 0

     # Use flash attention
-    use_flash_attention: bool = is_flash_attn_2_available()
+    use_flash_attention: bool = False

     # Gradient checkpointing
     use_gradient_checkpointing: bool = True
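
Note: with the default now False, callers that previously relied on the auto-detected value must opt in explicitly. A minimal sketch of that opt-in, assuming ModelArgs is a plain dataclass constructed with keyword arguments and that is_flash_attn_2_available comes from transformers.utils (both are assumptions, not confirmed by this diff):

    # Sketch: re-enable flash attention explicitly now that the default is False.
    # Assumes ModelArgs accepts its fields as keyword arguments and that
    # transformers.utils provides is_flash_attn_2_available (present in recent
    # transformers releases).
    from transformers.utils import is_flash_attn_2_available

    from fish_speech.models.text2semantic.llama import ModelArgs

    # Mirror the old behaviour as an explicit opt-in: enable flash attention
    # only when the kernel is actually installed.
    args = ModelArgs(use_flash_attention=is_flash_attn_2_available())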