Spaces:
Running
on
Zero
Running
on
Zero
刘鑫
committed on
Commit
·
1f183a6
1
Parent(s):
2dd4a32
set zero gpu inference
Browse files
app.py
CHANGED
|
@@ -50,8 +50,14 @@ def get_asr_model():
|
|
| 50 |
if _asr_model is None:
|
| 51 |
from funasr import AutoModel
|
| 52 |
print("Loading ASR model...")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 53 |
_asr_model = AutoModel(
|
| 54 |
-
model="iic/SenseVoiceSmall",
|
|
|
|
| 55 |
disable_update=True,
|
| 56 |
log_level='INFO',
|
| 57 |
device="cuda:0",
|
|
|
|
| 50 |
if _asr_model is None:
|
| 51 |
from funasr import AutoModel
|
| 52 |
print("Loading ASR model...")
|
| 53 |
+
# Set ModelScope cache directory for persistence
|
| 54 |
+
cache_dir = os.path.join(os.path.expanduser("~"), ".cache", "modelscope")
|
| 55 |
+
os.makedirs(cache_dir, exist_ok=True)
|
| 56 |
+
os.environ["MODELSCOPE_CACHE"] = cache_dir
|
| 57 |
+
|
| 58 |
_asr_model = AutoModel(
|
| 59 |
+
model="iic/SenseVoiceSmall", # ModelScope model ID
|
| 60 |
+
hub="ms", # Use ModelScope Hub
|
| 61 |
disable_update=True,
|
| 62 |
log_level='INFO',
|
| 63 |
device="cuda:0",
|