Spaces: Running on Zero
Expose @spaces.GPU decorator with shim
- __pycache__/app.cpython-313.pyc +0 -0
- app.py +10 -10
__pycache__/app.cpython-313.pyc
CHANGED
Binary files a/__pycache__/app.cpython-313.pyc and b/__pycache__/app.cpython-313.pyc differ
app.py
CHANGED
@@ -12,7 +12,15 @@ from pydantic import BaseModel
 try:
     import spaces  # type: ignore
 except Exception:  # pragma: no cover
-    spaces = None
+    class _SpacesShim:  # fallback for local runs
+        @staticmethod
+        def GPU(*_args, **_kwargs):
+            def identity(fn):
+                return fn
+
+            return identity
+
+    spaces = _SpacesShim()
 
 from transformers import (
     AutoModelForCausalLM,
@@ -70,16 +78,8 @@ class GenerateResponse(BaseModel):
 
 _MODEL = None
 
-if spaces is None:  # pragma: no cover - local testing path
-    def gpu_decorator(*args, **kwargs):  # type: ignore
-        def identity(fn):
-            return fn
-        return identity
-else:
-    gpu_decorator = spaces.GPU
-
 
-@gpu_decorator(duration=120)
+@spaces.GPU(duration=120)
 def get_model() -> AutoModelForCausalLM:
     global _MODEL
     if _MODEL is None:
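A self-contained sketch of the shim pattern from the first hunk: import the real spaces package when it is available (as it is on ZeroGPU hardware) and fall back to a no-op decorator factory otherwise. heavy_inference below is a made-up placeholder, not a function from app.py.

# Import shim: real ZeroGPU decorator on Spaces, no-op fallback elsewhere.
try:
    import spaces  # type: ignore
except Exception:  # pragma: no cover - local runs without the spaces package
    class _SpacesShim:
        @staticmethod
        def GPU(*_args, **_kwargs):
            def identity(fn):
                return fn
            return identity

    spaces = _SpacesShim()


@spaces.GPU(duration=120)  # GPU request on a Space, plain call locally
def heavy_inference(prompt: str) -> str:  # hypothetical example function
    return prompt.upper()

Because _SpacesShim.GPU accepts and ignores arbitrary arguments, call sites such as spaces.GPU(duration=120) work unchanged in both environments.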
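The second hunk applies the decorator directly to the lazy model loader. A hedged sketch of that shape, assuming the spaces import (or the shim above) is in place; the gpt2 checkpoint is only an illustrative stand-in for whatever model app.py actually loads.

import spaces  # type: ignore  # on a Space; locally, reuse the _SpacesShim above
from transformers import AutoModelForCausalLM

_MODEL = None  # module-level cache so the checkpoint loads once per process


@spaces.GPU(duration=120)  # ask ZeroGPU for a slot of up to 120 seconds
def get_model() -> AutoModelForCausalLM:
    global _MODEL
    if _MODEL is None:
        # Illustrative checkpoint; app.py configures its own model id.
        _MODEL = AutoModelForCausalLM.from_pretrained("gpt2")
    return _MODEL

The global _MODEL cache keeps the heavyweight from_pretrained call to the first invocation; later calls return the already-loaded model.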