Abid Ali Awan committed · f5327ec
Parent(s): abfac44
refactor: Introduce file_to_url function for improved file handling, update file URL generation logic, and enhance chat history management for better integration with the Gradio application.
app.py CHANGED
@@ -16,6 +16,9 @@ MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 MODEL = "gpt-5-mini"
 
+# Will be set when app launches
+APP_URL = None
+
 
 class MCPClientManager:
     def __init__(self, server_url: str):
@@ -71,7 +74,7 @@ openai_client = OpenAI(api_key=OPENAI_API_KEY)
 SYSTEM_PROMPT = """You are a helpful ML assistant with access to Auto Deployer tools.
 
 IMPORTANT: When calling tools with file_path parameter:
--
+- Use the provided file URL directly
 - Pass ONLY the raw URL (e.g., "https://...")
 - Never add prefixes like "Gradio File Input - "
 
@@ -90,11 +93,9 @@ async def chat(message: str, history: list, file_url: str):
     if file_url:
         user_content = f"[Uploaded CSV file URL: {file_url}]\n\n{message}"
 
-    # Build history
+    # Build history
     for item in history:
-        if isinstance(item,
-            messages.append({"role": item["role"], "content": item["content"]})
-        elif isinstance(item, (list, tuple)) and len(item) == 2:
+        if isinstance(item, (list, tuple)) and len(item) == 2:
             user_msg, assistant_msg = item
             messages.append({"role": "user", "content": user_msg})
             if assistant_msg:
@@ -165,11 +166,32 @@ async def chat(message: str, history: list, file_url: str):
             yield partial_response
 
 
-def
-    """
-    if
+def file_to_url(file_path: str, request: gr.Request) -> str:
+    """Convert local file path to HTTP URL"""
+    if file_path is None or file_path == "":
         return ""
-
+
+    # If already a URL, return as-is
+    if file_path.startswith("http://") or file_path.startswith("https://"):
+        return file_path
+
+    # Get base URL from request or use HF Space URL
+    space_host = os.getenv("SPACE_HOST")  # Set automatically on HF Spaces
+
+    if space_host:
+        base_url = f"https://{space_host}"
+    elif request:
+        # Local development - get from request headers
+        host = request.headers.get("host", "127.0.0.1:7860")
+        scheme = request.headers.get("x-forwarded-proto", "http")
+        base_url = f"{scheme}://{host}"
+    else:
+        base_url = "http://127.0.0.1:7860"
+
+    # Convert file path to URL
+    # Format: {base_url}/file={file_path}
+    http_url = f"{base_url}/file={file_path}"
+    return http_url
 
 
 with gr.Blocks(title="Auto Deployer MCP Client") as demo:
@@ -188,7 +210,7 @@ with gr.Blocks(title="Auto Deployer MCP Client") as demo:
                 type="filepath",
             )
             file_url = gr.Textbox(
-                label="File URL",
+                label="File URL (auto-generated)",
                 placeholder="Upload a file or paste a URL",
                 interactive=True,
             )
@@ -222,7 +244,11 @@ with gr.Blocks(title="Auto Deployer MCP Client") as demo:
             )
 
     # Update file URL when file is uploaded
-    file_input.change(
+    file_input.change(
+        fn=file_to_url,
+        inputs=file_input,
+        outputs=file_url,
+    )
 
     # Chat handlers
     async def respond(message, history, url):
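
For readers following the diff, here is a minimal standalone sketch of the new upload-to-URL wiring. The helper name file_to_url_sketch, the 127.0.0.1:7860 fallback, and the component labels are illustrative stand-ins for the code above; the point the commit appears to rely on is that Gradio injects a gr.Request into any handler whose parameter is type-hinted as gr.Request, so only the file path needs to be listed in inputs.

import os
import gradio as gr

def file_to_url_sketch(file_path: str, request: gr.Request) -> str:
    # Simplified stand-in for the file_to_url introduced in this commit.
    if not file_path:
        return ""
    if file_path.startswith(("http://", "https://")):
        return file_path  # already a URL, pass through unchanged
    space_host = os.getenv("SPACE_HOST")  # set automatically on HF Spaces
    if space_host:
        base_url = f"https://{space_host}"
    elif request is not None:
        # Local development: derive the base URL from the request headers
        host = request.headers.get("host", "127.0.0.1:7860")
        scheme = request.headers.get("x-forwarded-proto", "http")
        base_url = f"{scheme}://{host}"
    else:
        base_url = "http://127.0.0.1:7860"
    return f"{base_url}/file={file_path}"

with gr.Blocks() as demo:
    file_input = gr.File(label="Upload CSV", type="filepath")
    file_url = gr.Textbox(label="File URL (auto-generated)", interactive=True)
    # gr.Request is injected by Gradio; only the file path is an explicit input.
    file_input.change(fn=file_to_url_sketch, inputs=file_input, outputs=file_url)

if __name__ == "__main__":
    demo.launch()

The {base_url}/file={file_path} format mirrors the comment in the new function; whether a given path is actually reachable at that URL depends on Gradio's file-serving rules, so treat the sketch as illustrative rather than a drop-in replacement.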