Mhammad Ibrahim committed · Commit 8909e35 · 1 Parent(s): ebf7a6e

Add application file

Files changed:
- app.py +12 -2
- requirements.txt +1 -0
app.py CHANGED

@@ -26,15 +26,25 @@ model.eval()
 
 
 def predict(im):
-    if isinstance(im, dict):
+    if isinstance(im, dict):  # For sketchpad input
         im = im['composite']
-
+
+    # Convert to grayscale and resize to 28x28
+    import cv2
+    im_gray = cv2.cvtColor(im, cv2.COLOR_RGB2GRAY)
+    im_resized = cv2.resize(im_gray, (28, 28))
+
+    # Convert to tensor and normalize
+    x = torch.tensor(im_resized, dtype=torch.float32).unsqueeze(0).unsqueeze(0) / 255.0
+
     with torch.no_grad():
         out = model(x)
+
     probabilities = torch.nn.functional.softmax(out[0], dim=0)
     values, indices = torch.topk(probabilities, 5)
     return {LABELS[i]: v.item() for i, v in zip(indices, values)}
 
+
 interface = gr.Interface(
     predict,
     inputs="sketchpad",
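For context, the substance of this change is the new preprocessing path: the sketchpad's composite image is converted to grayscale, resized to 28x28, and normalized into a (1, 1, 28, 28) float tensor before being passed to the model. Below is a minimal, self-contained sketch of just that path; the random array is a stand-in for im['composite'], and the model and LABELS defined elsewhere in app.py are not reproduced here.

import cv2
import numpy as np
import torch

# Stand-in for the sketchpad's im['composite'] (assumed to be an RGB uint8 image).
im = np.random.randint(0, 256, (400, 400, 3), dtype=np.uint8)

im_gray = cv2.cvtColor(im, cv2.COLOR_RGB2GRAY)    # (400, 400) uint8
im_resized = cv2.resize(im_gray, (28, 28))        # (28, 28) uint8
x = torch.tensor(im_resized, dtype=torch.float32).unsqueeze(0).unsqueeze(0) / 255.0

print(x.shape, float(x.min()), float(x.max()))    # torch.Size([1, 1, 28, 28]), values in [0, 1]

Importing cv2 inside predict, as the diff does, works but is unconventional; the requirements.txt change below is what makes that import resolve on the Space.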
requirements.txt CHANGED

@@ -1,3 +1,4 @@
 torch
+opencv-python
 gradio
 pathlib
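The added opencv-python entry is the pip package that provides the cv2 module imported inside predict. A quick sanity check after installing the requirements (the printed version is simply whatever pip resolves):

import cv2
print(cv2.__version__)  # confirms the cv2 module from opencv-python is importable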