Fix query params
detector/server.py CHANGED (+5 -5)
@@ -7,7 +7,7 @@ from transformers import RobertaForSequenceClassification, RobertaTokenizer
 import json
 import fire
 import torch
-from urllib.parse import urlparse, unquote
+from urllib.parse import urlparse, unquote, parse_qs
 
 
 model: RobertaForSequenceClassification = None
@@ -45,9 +45,10 @@ class RequestHandler(SimpleHTTPRequestHandler):
             self.wfile.write(json.dumps({"error": str(e)}).encode('utf-8'))
 
     def do_GET(self):
-        query = unquote(urlparse(self.path).query)
+        parsed = urlparse(self.path)
+        query_params = parse_qs(parsed.query)
 
-        if not query:
+        if 'text' not in query_params:
             self.begin_content('text/html')
 
             html = os.path.join(os.path.dirname(__file__), 'index.html')
@@ -56,7 +57,7 @@ class RequestHandler(SimpleHTTPRequestHandler):
 
         self.begin_content('application/json;charset=UTF-8')
 
-        all_tokens, used_tokens, fake, real = self.infer(query)
+        all_tokens, used_tokens, fake, real = self.infer(unquote(query_params['text'][0]))
 
         self.wfile.write(json.dumps(dict(
             all_tokens=all_tokens,
@@ -147,4 +148,3 @@ def main(checkpoint, port=8080, device='cuda' if torch.cuda.is_available() else
 
 if __name__ == '__main__':
     fire.Fire(main)
-
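For context, parse_qs maps each parameter to a list of values (e.g. parse_qs('text=hello+world') yields {'text': ['hello world']}), which is why the handler reads query_params['text'][0]. Below is a minimal client sketch against the updated endpoint; it assumes the server is running locally on the default port 8080 from main, and the URL and sample text are illustrative, not part of this commit.

import json
from urllib.parse import urlencode
from urllib.request import urlopen

# Encode the document under the 'text' key, which do_GET now expects.
params = urlencode({"text": "A passage to score with the detector."})
with urlopen("http://localhost:8080/?" + params) as resp:
    # The handler answers with JSON containing all_tokens and the other
    # fields built in do_GET (the remaining keys are truncated in the diff above).
    result = json.loads(resp.read().decode("utf-8"))
print(result)

Requests without a text parameter now hit the "if 'text' not in query_params" branch and are served index.html instead of a JSON response.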