Spaces: Running on L4
A minimal test script that checks GPU availability and that the SAM3 classes import cleanly:

```python
import spaces
import torch


@spaces.GPU  # marks this function for GPU execution via the spaces library (no-op on dedicated GPU hardware)
def test_gpu():
    # Importing here simply confirms the SAM3 classes are available in this environment
    from transformers import Sam3Model, Sam3Processor  # noqa: F401

    device = "cuda" if torch.cuda.is_available() else "cpu"
    print(f"Test GPU function works! Device: {device}")
    return f"GPU test successful on {device}"


if __name__ == "__main__":
    print("Starting minimal test...")
    result = test_gpu()
    print(result)
```
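On a Space, a check like this is usually exposed through the app entry point rather than run as a one-off `__main__` script. Below is a minimal sketch, assuming the Space uses the Gradio SDK; the `demo` layout, labels, and the simplified `test_gpu` here are illustrative, not part of the original app:

```python
import gradio as gr
import torch


def test_gpu():
    # Same device check as the script above, without the SAM3 import
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"GPU test successful on {device}"


# A single button that runs the GPU test and shows its return value
with gr.Blocks() as demo:
    result_box = gr.Textbox(label="GPU test result")
    run_button = gr.Button("Run GPU test")
    run_button.click(fn=test_gpu, inputs=None, outputs=result_box)

if __name__ == "__main__":
    demo.launch()
```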