update some files
- app.py +20 -0
- retreival_launch.sh +0 -11
- retrieval_launch.sh +1 -2
- retrieval_server.py +1 -1
app.py
CHANGED
@@ -5,6 +5,26 @@ import re
 import gradio as gr
 from threading import Thread
 
+import subprocess
+import time
+import atexit
+
+try:
+    server_process = subprocess.Popen(["bash", "retrieval_launch.sh"])
+    print(f"Server process started with PID: {server_process.pid}")
+
+    # Register a function to kill the server when app.py exits
+    def cleanup():
+        print("Shutting down retrieval server...")
+        server_process.terminate()
+        server_process.wait()
+        print("Server process terminated.")
+
+    atexit.register(cleanup)
+except Exception as e:
+    print(f"Failed to start retrieval_launch.sh: {e}")
+    print("WARNING: The retrieval server may not be running.")
+
 # --- Configuration --------------------------------------------------
 
 # 1. DEFINE YOUR MODEL
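The new block in app.py launches the retrieval server as a background subprocess and registers an atexit hook so it is terminated when the app exits. Note that time is imported but not yet used; a natural follow-up is to wait for the server to become reachable before Gradio starts sending it queries. A minimal sketch of such a readiness wait, assuming the server listens on localhost port 8000 (the port and URL are assumptions, not taken from this commit):

# Sketch only: wait for the retrieval server before building the Gradio UI.
# The URL below (localhost:8000) is an assumption, not part of this commit.
import time
import urllib.error
import urllib.request

def wait_for_server(url="http://127.0.0.1:8000/", timeout_s=120):
    """Poll the assumed server URL until it answers or the timeout expires."""
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        try:
            urllib.request.urlopen(url, timeout=2)
            print("Retrieval server is up.")
            return True
        except urllib.error.HTTPError:
            # Any HTTP response, even an error status, means the server is listening.
            print("Retrieval server is up.")
            return True
        except (urllib.error.URLError, OSError):
            time.sleep(2)  # not ready yet, retry shortly
    print("WARNING: retrieval server did not become ready in time.")
    return False

Such a call would sit right after subprocess.Popen(...) and before the Gradio interface is constructed.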
retreival_launch.sh
DELETED
@@ -1,11 +0,0 @@
-
-file_path=./data
-index_file=$file_path/e5_Flat.index
-corpus_file=$file_path/wiki-18.jsonl
-retriever=intfloat/e5-base-v2
-
-export CUDA_VISIBLE_DEVICES="1,3"
-python search_r1/search/retrieval_server.py --index_path $index_file \
-    --corpus_path $corpus_file \
-    --topk 3 \
-    --retriever_model $retriever
retrieval_launch.sh
CHANGED
@@ -8,5 +8,4 @@ export CUDA_VISIBLE_DEVICES="1,3"
 python retrieval_server.py --index_path $index_file \
     --corpus_path $corpus_file \
     --topk 3 \
-    --retriever_model $retriever \
-    --faiss_gpu False
+    --retriever_model $retriever
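Taken together with the deletion of the misspelled retreival_launch.sh above, the launcher now invokes retrieval_server.py from the repository root and no longer passes --faiss_gpu on the command line. Because app.py only notices a failure if the shell script itself cannot be spawned, a small pre-flight check can surface missing data files earlier. A sketch, assuming the index and corpus live at the paths used in the deleted script (./data/e5_Flat.index and ./data/wiki-18.jsonl):

# Sketch only: fail fast if the retrieval data files are missing.
# The paths are taken from the deleted retreival_launch.sh and are assumed
# to still be what retrieval_launch.sh points at.
from pathlib import Path

REQUIRED_FILES = [
    Path("data/e5_Flat.index"),   # FAISS index
    Path("data/wiki-18.jsonl"),   # retrieval corpus
]

missing = [str(p) for p in REQUIRED_FILES if not p.exists()]
if missing:
    print(f"WARNING: missing retrieval data files: {missing}")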
retrieval_server.py
CHANGED
@@ -339,7 +339,7 @@ config = Config(
     index_path=args.index_path,
     corpus_path=args.corpus_path,
     retrieval_topk=args.topk,
-    faiss_gpu=
+    faiss_gpu=False,
     retrieval_model_path=args.retriever_model,
     retrieval_pooling_method="mean",
     retrieval_query_max_length=256,
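Hardcoding faiss_gpu=False here mirrors the removal of --faiss_gpu False from retrieval_launch.sh, so the script and the server no longer disagree about the flag. An alternative, shown only as a sketch (the parser below is illustrative and not code from retrieval_server.py), is to keep the option on the command line with a CPU-safe default:

# Sketch: keep --faiss_gpu as an optional CLI flag defaulting to False.
# Argument names mirror the flags used in retrieval_launch.sh; the parser
# itself is an assumption, not taken from this commit.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--index_path", required=True)
parser.add_argument("--corpus_path", required=True)
parser.add_argument("--topk", type=int, default=3)
parser.add_argument("--retriever_model", required=True)
parser.add_argument("--faiss_gpu", action="store_true",
                    help="Use GPU FAISS; omit the flag for CPU (the default).")
args = parser.parse_args()

# The Config call could then pass faiss_gpu=args.faiss_gpu instead of a literal False.

With action="store_true" the default is False, so omitting the flag, as retrieval_launch.sh now does, still selects CPU FAISS.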