import spaces


@spaces.GPU
def test_gpu():
    # Heavy imports are done inside the @spaces.GPU-decorated function.
    # Sam3Model/Sam3Processor are imported only to check that they load; they are not used here.
    import torch
    from transformers import Sam3Model, Sam3Processor

    device = "cuda" if torch.cuda.is_available() else "cpu"
    print(f"Test GPU function works! Device: {device}")
    return f"GPU test successful on {device}"


if __name__ == "__main__":
    print("Starting minimal test...")
    result = test_gpu()
    print(result)