import spaces
from huggingface_hub import hf_hub_download
import subprocess
import importlib
import site
import torch

# Detect whether FlashAttention is available; otherwise fall back to PyTorch SDPA.
flash_attention_installed = False
try:
    import flash_attn
    flash_attention_installed = True
    print("FlashAttention is available.")
except Exception as e:
    print(f"⚠️ Could not import FlashAttention: {e}")
    print("Continuing without FlashAttention...")

# FlashAttention 2 requires half precision (bfloat16/float16); with SDPA, leave the
# dtype unset so the model loads in its default precision.
attn_implementation = "flash_attention_2" if flash_attention_installed else "sdpa"
dtype = torch.bfloat16 if flash_attention_installed else None
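
# A minimal sketch (not part of the original file) of how attn_implementation and
# dtype would typically be consumed downstream: passed to a Hugging Face
# Transformers model load. The model id below is a placeholder assumption.
#
# from transformers import AutoModelForCausalLM
#
# model = AutoModelForCausalLM.from_pretrained(
#     "some-org/some-model",  # placeholder model id
#     attn_implementation=attn_implementation,
#     torch_dtype=dtype if dtype is not None else "auto",
# )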