tmp-service / install_flsh_attn.py
JacobLinCool's picture
Update install_flsh_attn.py
0773d7b verified
raw
history blame contribute delete
550 Bytes
import spaces
from huggingface_hub import hf_hub_download
import subprocess
import importlib
import site
import torch
# Probe whether FlashAttention can be imported.  NOTE: this block only
# *imports* flash_attn — actual installation is expected to have happened
# elsewhere (e.g. the Space's build step) — so the messages describe an
# availability check, not an install attempt.
flash_attention_installed = False
try:
    import flash_attn  # noqa: F401 — imported solely to test availability
    flash_attention_installed = True
    print("FlashAttention is available.")
except Exception as e:  # broad on purpose: import can fail with ImportError or CUDA/ABI errors
    print(f"⚠️ FlashAttention is not available: {e}")
    print("Continuing without FlashAttention...")

# Backend selection for model loading: flash_attention_2 requires a
# half-precision dtype (bfloat16 here); otherwise fall back to PyTorch's
# built-in scaled-dot-product attention with the model's default dtype.
attn_implementation = "flash_attention_2" if flash_attention_installed else "sdpa"
dtype = torch.bfloat16 if flash_attention_installed else None