Testing new feature
app.py  CHANGED
@@ -35,8 +35,10 @@ def classify_toxic(text):
     batch = tokenizer.encode(text, return_tensors="pt")
     output = model(batch).logits
     probabilities = torch.nn.functional.softmax(output, dim=-1)
-
-
+    preds = probabilities.tolist()
+    print(f"Preds: {preds}")
+    return "Safe"
+    # return "Toxic" if preds[0] <= 0.55 else "Safe"
 
 
 # -----------------------
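
For context, a minimal self-contained sketch of what the patched classify_toxic does when run on its own. The imports, the checkpoint name, and the __main__ call are assumptions added for illustration (the diff does not show how the Space loads its model or tokenizer); only the function body comes from the commit.

# Sketch of the patched function, runnable stand-alone.
# NOTE: the checkpoint name is an assumption for illustration;
# the actual app may load a different model/tokenizer.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

MODEL_NAME = "unitary/toxic-bert"  # assumed checkpoint, not taken from the diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)

def classify_toxic(text):
    batch = tokenizer.encode(text, return_tensors="pt")
    output = model(batch).logits
    probabilities = torch.nn.functional.softmax(output, dim=-1)
    preds = probabilities.tolist()   # e.g. [[0.98, 0.02]]
    print(f"Preds: {preds}")         # debug output while testing
    return "Safe"                    # hard-coded while the threshold is being tuned
    # return "Toxic" if preds[0] <= 0.55 else "Safe"

if __name__ == "__main__":
    print(classify_toxic("have a nice day"))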