import gradio as gr
import requests
import time
import os
# Use localhost for HF Spaces since both services run in the same container
API_BASE_URL = "http://localhost:8000"
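# The FastAPI backend is assumed to expose three endpoints; the response shapes
# below are inferred from how the helper functions read them:
#   POST /extract_links -> {"unique_links": [...]}
#   POST /extract_text  -> {"file_saved": "<path to saved text>"}
#   POST /rag           -> {"user_query": ..., "assistant_response": ..., "sources": ...}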
def extract_links(url):
    """Extract links from the given URL"""
    endpoint = f"{API_BASE_URL}/extract_links"
    payload = {"url": url}
    try:
        response = requests.post(endpoint, json=payload, timeout=30)
        if response.status_code == 200:
            return response.json()["unique_links"]
        else:
            raise Exception(f"Failed to extract links: {response.text}")
    except requests.exceptions.RequestException as e:
        raise Exception(f"Connection error: {str(e)}")
def extract_text(urls):
    """Extract text from the given URLs"""
    endpoint = f"{API_BASE_URL}/extract_text"
    try:
        response = requests.post(endpoint, json=urls, timeout=60)
        if response.status_code == 200:
            return response.json()["file_saved"]
        else:
            raise Exception(f"Failed to extract text: {response.text}")
    except requests.exceptions.RequestException as e:
        raise Exception(f"Connection error: {str(e)}")
def perform_rag(file_path, prompt):
    """Perform RAG on the extracted text"""
    endpoint = f"{API_BASE_URL}/rag"
    payload = {"file_path": file_path, "prompt": prompt}
    try:
        response = requests.post(endpoint, json=payload, timeout=60)
        if response.status_code == 200:
            return response.json()
        else:
            raise Exception(f"Failed to perform RAG: {response.text}")
    except requests.exceptions.RequestException as e:
        raise Exception(f"Connection error: {str(e)}")
def check_api_health():
    """Check whether the FastAPI service is reachable"""
    try:
        response = requests.get(f"{API_BASE_URL}/", timeout=5)
        return response.status_code == 200
    except requests.exceptions.RequestException:
        return False
def process_web_rag(url, prompt, data_source, progress=gr.Progress()):
    """Main processing function with progress tracking"""
    if not url or not prompt:
        return "Error: Please provide both URL and prompt", "", ""

    # Check API health first
    if not check_api_health():
        return "Error: FastAPI service is not available. Please wait a moment and try again.", "", ""

    try:
        progress(0.1, desc="Starting process...")

        if data_source == "Multiple links (first 5)":
            progress(0.2, desc="Extracting links from webpage...")
            links = extract_links(url)
            sample_links = links[:5]

            progress(0.4, desc="Extracting text from multiple pages...")
            file_path = extract_text(sample_links)
            status_msg = f"Processed {len(sample_links)} pages from {len(links)} total links found"
        else:
            progress(0.3, desc="Extracting text from homepage...")
            file_path = extract_text([url])
            status_msg = "Processed homepage content"

        progress(0.7, desc="Performing RAG analysis...")
        result = perform_rag(file_path, prompt)
        progress(1.0, desc="Complete!")

        # Format the response
        response_text = f"**Query:** {result['user_query']}\n\n**Response:** {result['assistant_response']}"
        sources_text = result['sources']

        return status_msg, response_text, sources_text
    except Exception as e:
        return f"Error: {str(e)}", "", ""
# Custom CSS for modern styling
custom_css = """
.gradio-container {
    max-width: 900px !important;
    margin: auto !important;
}
.header-text {
    text-align: center;
    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
    -webkit-background-clip: text;
    -webkit-text-fill-color: transparent;
    font-size: 2.5em;
    font-weight: bold;
    margin-bottom: 0.5em;
}
.description-text {
    text-align: center;
    color: #666;
    font-size: 1.1em;
    margin-bottom: 2em;
}
.input-group {
    background: #f8f9fa;
    padding: 1.5em;
    border-radius: 12px;
    margin: 1em 0;
    border: 1px solid #e9ecef;
}
.output-group {
    background: #ffffff;
    border-radius: 12px;
    border: 1px solid #dee2e6;
    margin: 1em 0;
}
.status-box {
    padding: 1em;
    border-radius: 8px;
    margin: 0.5em 0;
}
.status-success {
    background-color: #d4edda;
    border-color: #c3e6cb;
    color: #155724;
}
.status-error {
    background-color: #f8d7da;
    border-color: #f5c6cb;
    color: #721c24;
}
"""
# Create the Gradio interface
with gr.Blocks(css=custom_css, title="Web RAG System", theme=gr.themes.Soft()) as app:
    # Header
    gr.HTML("""
        <div class="header-text">Web RAG System</div>
        <div class="description-text">
            Extract content from web pages and ask questions using AI-powered retrieval
        </div>
    """)
    with gr.Row():
        with gr.Column(scale=1):
            # Input section
            gr.HTML('<div style="font-size: 1.2em; font-weight: bold; margin-bottom: 1em;">Input Configuration</div>')

            url_input = gr.Textbox(
                label="Website URL",
                placeholder="https://example.com",
                info="Enter the URL you want to analyze"
            )

            prompt_input = gr.Textbox(
                label="Your Question",
                placeholder="What is this website about?",
                lines=3,
                info="Ask any question about the content"
            )

            data_source = gr.Radio(
                choices=["Multiple links (first 5)", "Homepage only"],
                value="Multiple links (first 5)",
                label="Data Source",
                info="Choose how much content to analyze"
            )

            process_btn = gr.Button(
                "Analyze Website",
                variant="primary",
                size="lg"
            )
            # Output section
            gr.HTML('<div style="font-size: 1.2em; font-weight: bold; margin: 2em 0 1em 0;">Results</div>')

            status_output = gr.Textbox(
                label="Processing Status",
                interactive=False,
                show_label=True
            )

            with gr.Row():
                with gr.Column(scale=2):
                    response_output = gr.Textbox(
                        label="AI Response",
                        lines=8,
                        interactive=False,
                        show_label=True
                    )
                with gr.Column(scale=1):
                    sources_output = gr.Textbox(
                        label="Sources",
                        lines=8,
                        interactive=False,
                        show_label=True
                    )
    # Example section
    gr.HTML("""
        <div style="margin-top: 2em; padding: 1.5em; background: #f8f9fa; border-radius: 12px; border-left: 4px solid #667eea;">
            <h3 style="margin-top: 0; color: #333;">Example Usage</h3>
            <p><strong>URL:</strong> https://openai.com</p>
            <p><strong>Question:</strong> What are the main products and services offered?</p>
            <p><strong>Data Source:</strong> Multiple links (first 5)</p>
        </div>
    """)

    # Add a note about the system status
    gr.HTML("""
        <div style="margin-top: 1em; padding: 1em; background: #e3f2fd; border-radius: 8px; border-left: 4px solid #2196f3;">
            <p style="margin: 0; color: #0d47a1;">
                <strong>Note:</strong> If you encounter connection errors, please wait a moment for the system to initialize and try again.
            </p>
        </div>
    """)
    # Connect the button to the processing function
    process_btn.click(
        fn=process_web_rag,
        inputs=[url_input, prompt_input, data_source],
        outputs=[status_output, response_output, sources_output],
        show_progress=True
    )

    # Also submit on Enter from either text field
    url_input.submit(
        fn=process_web_rag,
        inputs=[url_input, prompt_input, data_source],
        outputs=[status_output, response_output, sources_output],
        show_progress=True
    )
    prompt_input.submit(
        fn=process_web_rag,
        inputs=[url_input, prompt_input, data_source],
        outputs=[status_output, response_output, sources_output],
        show_progress=True
    )
if __name__ == "__main__":
    app.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True,
        quiet=False
    )