Spaces · Running on Zero
Commit · d3bb466
1 Parent(s): 5907c41
Make _get_device() ZeroGPU-aware so models target CUDA
On ZeroGPU Spaces torch.cuda.is_available() returns False at
module-load time because the GPU is only attached inside @spaces.GPU.
Check spaces.config.Config.zero_gpu as a fallback so all models are
placed on CUDA and work correctly once the GPU is attached.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
webui.py CHANGED
@@ -140,8 +140,21 @@ def _clear_target_meta_unless_example(_audio, skip_count):
 
 
 def _get_device() -> str:
-    """Use CUDA if available, else CPU (e.g. for CI or CPU-only environments).
-
+    """Use CUDA if available, else CPU (e.g. for CI or CPU-only environments).
+
+    On ZeroGPU Spaces the real GPU is only attached inside @spaces.GPU, so
+    torch.cuda.is_available() returns False at module-load time. We still
+    want models to target CUDA so they run on the GPU once it is attached.
+    """
+    if torch.cuda.is_available():
+        return "cuda:0"
+    try:
+        from spaces.config import Config
+        if Config.zero_gpu:
+            return "cuda:0"
+    except (ImportError, AttributeError):
+        pass
+    return "cpu"
 
 
 def _session_dir_from_target(target_audio_path: str) -> Path:
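As a usage note (not part of this commit): in a ZeroGPU Space the device string from _get_device() is typically consumed at module load, while the GPU itself is only attached inside a @spaces.GPU-decorated handler. The sketch below illustrates that pattern; it assumes _get_device() from the diff above is in scope, and the DEVICE, model, and infer names are hypothetical stand-ins rather than code from webui.py.

import spaces
import torch

# Module-load time on ZeroGPU: no GPU is attached yet, but _get_device()
# still reports "cuda:0" via the Config.zero_gpu fallback above.
DEVICE = _get_device()
model = torch.nn.Linear(4, 4).to(DEVICE)  # stand-in for the Space's real models

@spaces.GPU  # ZeroGPU attaches a real GPU only while this handler runs
def infer(x: torch.Tensor) -> torch.Tensor:
    # Inside the handler torch.cuda.is_available() is True, so the model
    # and inputs placed on DEVICE actually execute on the attached GPU.
    return model(x.to(DEVICE))

Keeping the fallback inside try/except means the same code path still returns "cpu" when the spaces package is absent, e.g. in CI or other CPU-only environments, as the docstring notes.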