Spaces:
Running on Zero
use newer model
Browse files
- inference.py +1 -1
inference.py
CHANGED
|
@@ -18,7 +18,7 @@ SampleFn = Callable[
|
|
| 18 |
]
|
| 19 |
### Loading
|
| 20 |
|
| 21 |
-
def load_model_from_hf(repo_id: str = 'jordand/echo-tts', device: str = 'cuda', dtype: torch.dtype | None = torch.bfloat16, compile: bool = False, token: str | None = None) -> EchoDiT:
|
| 22 |
with torch.device('meta'):
|
| 23 |
model = EchoDiT(
|
| 24 |
latent_size=80, model_size=2048, num_layers=24, num_heads=16,
|
|
|
|
| 18 |
]
|
| 19 |
### Loading
|
| 20 |
|
| 21 |
+
def load_model_from_hf(repo_id: str = 'jordand/echo-tts-base', device: str = 'cuda', dtype: torch.dtype | None = torch.bfloat16, compile: bool = False, token: str | None = None) -> EchoDiT:
|
| 22 |
with torch.device('meta'):
|
| 23 |
model = EchoDiT(
|
| 24 |
latent_size=80, model_size=2048, num_layers=24, num_heads=16,
|