# Source header (scraped page residue, kept as a comment so the file parses):
# Hugging Face Space by multimodalart — commit eef328d (verified), "Update app.py".
import os
import re
import gradio as gr
from huggingface_hub import InferenceClient, HfApi, CommitOperationAdd
# 1. Load Environment Token
# Hugging Face access token used for both hub API calls and inference.
# May be None here; migrate_app_generator checks it and aborts with an error if unset.
HF_TOKEN = os.environ.get("HF_TOKEN")
def load_migration_guide():
    """Read the Gradio 5→6 migration guide shipped next to this app.

    Returns:
        The full text of ``migration_guide.txt``, or a human-readable
        error string when the file is missing (the caller embeds the
        result directly into the LLM system prompt either way).
    """
    guide_path = "migration_guide.txt"
    try:
        handle = open(guide_path, "r", encoding="utf-8")
    except FileNotFoundError:
        # Degrade gracefully: the error text is surfaced inside the prompt.
        return "Error: migration_guide.txt not found. Please ensure it is in the same directory."
    with handle:
        return handle.read()
def extract_code_block(text: str) -> str:
    """Extract Python source from an LLM markdown response.

    Tries an explicitly ```python-tagged fence first (original behavior),
    then falls back to any fenced block regardless of language tag —
    models frequently emit untagged fences, which previously slipped
    through to the fragile substring fallback and returned "".

    Args:
        text: Raw LLM response, possibly containing markdown fences.

    Returns:
        The code inside the first matching fence, the whole text when it
        looks like raw Gradio code without fences, or "" when no code is found.
    """
    fence_patterns = (
        r"```python\s*(.*?)\s*```",   # preferred: explicitly python-tagged block
        r"```[\w+-]*\s*(.*?)\s*```",  # any fenced block, skipping a language tag
    )
    for pattern in fence_patterns:
        match = re.search(pattern, text, re.DOTALL)
        if match:
            return match.group(1)
    # Fallback: the model returned raw code without markdown fences.
    if "import gradio" in text:
        return text
    return ""
def migrate_app_generator(space_url):
    """
    Generator function that streams logs to a textbox and the final result to markdown.

    Each ``yield`` emits a ``(log_text, result_markdown)`` tuple consumed by the
    two output components wired in the UI below; the markdown slot stays "" until
    the pull request has actually been created.

    Args:
        space_url: Full Space URL ("https://huggingface.co/spaces/user/name")
            or a bare repo id ("user/name") — both forms are accepted.
    """
    # Guard clauses: abort early with a single error yield.
    if not HF_TOKEN:
        yield "❌ Error: HF_TOKEN environment variable is not set.", ""
        return
    if not space_url:
        yield "❌ Error: Please enter a Space URL.", ""
        return
    log_buffer = []
    def log(message):
        """Helper to append to log buffer and return joined string."""
        print(message) # Console log
        log_buffer.append(f"[{len(log_buffer)+1}] {message}")
        return "\n".join(log_buffer)
    # 1. Initialization
    yield log(f"πŸš€ Starting migration for: {space_url}"), ""
    # Setup Clients
    api = HfApi(token=HF_TOKEN)
    client = InferenceClient(api_key=HF_TOKEN)
    # Verify Auth
    try:
        user = api.whoami()
        yield log(f"βœ… Authenticated as: {user['name']}"), ""
    except Exception as e:
        yield log(f"❌ Authentication failed: {str(e)}"), ""
        return
    # Parse Space ID
    # Works for both full URLs and bare "user/name" ids (replace is a no-op then).
    space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")
    try:
        # Get README
        yield log(f"πŸ“₯ Fetching README.md from {space_id}..."), ""
        readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
        with open(readme_path, "r", encoding="utf-8") as f:
            readme_content = f.read()
        # Determine python file name (default to app.py)
        # The Space's entry point is declared in the README YAML front matter.
        app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
        app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"
        yield log(f"πŸ“„ Identified app file: {app_file_name}"), ""
        # Get Python Code
        yield log(f"πŸ“₯ Fetching {app_file_name}..."), ""
        code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
        with open(code_path, "r", encoding="utf-8") as f:
            original_code = f.read()
        yield log(f"βœ… Code fetched successfully ({len(original_code)} chars)."), ""
    except Exception as e:
        yield log(f"❌ Error fetching files: {str(e)}"), ""
        return
    # Prepare Prompt
    # The migration guide text is inlined into the system prompt verbatim.
    migration_guide = load_migration_guide()
    system_prompt = (
        "You are an expert Python developer specializing in Gradio. "
        "Your task is to strictly migrate a Gradio 5.x application to Gradio 6.x based on the provided guide.\n\n"
        "### MIGRATION GUIDE ###\n"
        f"{migration_guide}\n\n"
        "### INSTRUCTIONS ###\n"
        "1. Analyze the user's code.\n"
        "2. Apply ALL necessary changes according to the guide (e.g., Blocks parameters, Chatbot tuples to messages, Video return types, API visibility).\n"
        "3. Output ONLY the complete, runnable Python code inside a ```python markdown block.\n"
        "4. Do not include conversational text, only the code."
    )
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Migrate this code to Gradio 6:\n\n```python\n{original_code}\n```"}
    ]
    # Call Moonshot Model
    # NOTE(review): the model id below is GLM-4.6 (zai-org) served via Cerebras,
    # despite the "Moonshot" label in the comment above — confirm which is intended.
    yield log("🧠 Analyzing code with zai-org/GLM-4.6 (this may take ~30s)..."), ""
    try:
        completion = client.chat.completions.create(
            model="zai-org/GLM-4.6:cerebras",
            messages=messages,
            temperature=0.1,  # low temperature: mechanical, deterministic edits
            max_tokens=64000,
        )
        llm_response = completion.choices[0].message.content
        migrated_code = extract_code_block(llm_response)
        if not migrated_code:
            yield log("❌ LLM failed to generate valid Python code block."), ""
            return
        yield log("✨ Code successfully migrated by LLM."), ""
    except Exception as e:
        yield log(f"❌ LLM Processing failed: {str(e)}"), ""
        return
    # Prepare Commit
    yield log("πŸ“¦ preparing Pull Request..."), ""
    # Update SDK version in README to 6.0.0
    # Uses regex to replace sdk_version line
    new_readme_content = re.sub(
        r"sdk_version:.*",
        "sdk_version: 6.0.0",
        readme_content
    )
    # Both files go into one atomic commit on the PR branch.
    operations = [
        CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=new_readme_content.encode('utf-8')),
        CommitOperationAdd(path_in_repo=app_file_name, path_or_fileobj=migrated_code.encode('utf-8')),
    ]
    pr_title = "[AUTOMATED] Migration to Gradio 6.0"
    pr_description = (
        "This PR migrates the Space to Gradio 6.0.\n\n"
        "### Changes\n"
        "- `README.md`: Updated `sdk_version` to `6.0.0`\n"
        f"- `{app_file_name}`: Automated refactoring using `zai-org/GLM-4.6` based on the migration guide."
    )
    # Create PR
    try:
        commit_info = api.create_commit(
            repo_id=space_id,
            operations=operations,
            commit_message=pr_title,
            commit_description=pr_description,
            repo_type="space",
            create_pr=True
        )
        yield log("πŸŽ‰ Done!"), f"## βœ… Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
    except Exception as e:
        yield log(f"❌ Failed to create Pull Request: {str(e)}"), ""
# --- UI ---
# Two-column layout: streaming execution log on the left, final PR link
# (rendered as markdown) on the right.
with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
    gr.Markdown("# Gradio 6 Migration Tool πŸ’»")
    gr.Markdown("Migrate your Hugging Face Spaces from Gradio 5.x to Gradio 6.x ")
    with gr.Row():
        space_input = gr.Textbox(
            label="Space URL or ID",
            placeholder="username/space-name",
            scale=4
        )
        btn = gr.Button("Migrate Space", variant="primary", scale=1)
    with gr.Row():
        with gr.Column(scale=1):
            log_output = gr.Textbox(
                label="Execution Log",
                lines=10,
                interactive=False,
                autoscroll=True
            )
        with gr.Column(scale=1):
            # elem_id="output" is targeted by the CSS below to stretch the panel.
            with gr.Accordion("Output", open=True, elem_id="output"):
                result_output = gr.Markdown(label="Result")
    # migrate_app_generator is a generator: each yielded (log, result) tuple
    # streams into the two outputs in order.
    btn.click(
        fn=migrate_app_generator,
        inputs=[space_input],
        outputs=[log_output, result_output]
    )
# Page-level CSS, passed to launch() rather than the Blocks constructor.
# NOTE(review): assumes launch-time css is the Gradio 6 convention — confirm
# against the gradio version pinned for this Space.
css = '''.gradio-container .app { max-width: 900px !important; margin: 0 auto; }
button{align-self: stretch}
#output{height: 100%; align-self: stretch;}
'''
if __name__ == "__main__":
    demo.launch(css=css)