import spaces
from huggingface_hub import hf_hub_download
import subprocess
import importlib
import site
import torch

# Check whether FlashAttention can be imported; fall back gracefully if it can't.
flash_attention_installed = False
try:
    import flash_attn
    flash_attention_installed = True
    print("FlashAttention is available.")
except Exception as e:
    print(f"⚠️ FlashAttention is not available: {e}")
    print("Continuing without FlashAttention...")

# Pick the attention backend and dtype based on FlashAttention availability.
attn_implementation = "flash_attention_2" if flash_attention_installed else "sdpa"
dtype = torch.bfloat16 if flash_attention_installed else None
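
These two variables are typically passed through to the model load. Below is a minimal sketch, assuming a transformers causal LM is loaded later in the app; the model id is a placeholder, not taken from the original code:

from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "your-org/your-model"  # placeholder model id (assumption, not from the original snippet)

# attn_implementation selects FlashAttention 2 when available, otherwise PyTorch SDPA.
# torch_dtype=None lets transformers fall back to the checkpoint's default dtype.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    attn_implementation=attn_implementation,
    torch_dtype=dtype,
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)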