import os
import re
import gradio as gr
from huggingface_hub import InferenceClient, HfApi, CommitOperationAdd

# Load the environment token
HF_TOKEN = os.environ.get("HF_TOKEN")
if not HF_TOKEN:
    raise ValueError("HF_TOKEN environment variable is not set. Please add it to your Space settings.")
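
# Note: api.whoami() succeeds with any valid token, but the create_commit()
# call further down needs a write-scoped token to open a PR on the Space.
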
def load_migration_guide():
"""Loads the migration context from the external txt file."""
try:
with open("migration_guide.txt", "r", encoding="utf-8") as f:
return f.read()
except FileNotFoundError:
return "Error: migration_guide.txt not found. Please ensure it is in the same directory."
def extract_code_block(text: str) -> str:
"""Extracts python code from LLM markdown response."""
pattern = r"```python\s*(.*?)\s*```"
match = re.search(pattern, text, re.DOTALL)
if match:
return match.group(1)
# Fallback: if the model just returned code without blocks
if "import gradio" in text:
return text
return ""
def migrate_app(space_url):
"""
1. Fetches Space metadata and code using Env Token.
2. Sends code + migration guide to LLM.
3. Creates a PR on the Space.
"""
if not space_url:
raise gr.Error("Please enter a Space URL.")
# Setup Clients using the Environment Token
api = HfApi(token=HF_TOKEN)
client = InferenceClient(api_key=HF_TOKEN)
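    # InferenceClient speaks the chat.completions API via HF Inference
    # Providers; the ":novita" suffix on the model name below routes the
    # request to the Novita provider (assumed to serve this model).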
    # Verify auth early so a bad token fails fast
    try:
        user = api.whoami()
        print(f"Authenticated as {user['name']}")
    except Exception as e:
        raise gr.Error(f"Authentication failed using HF_TOKEN: {str(e)}")
    # Parse the Space ID from either a full URL or a bare ID
    space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")
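    # e.g. "https://huggingface.co/spaces/user/demo" and "user/demo"
    # (names illustrative) both resolve to the repo ID "user/demo".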
    try:
        # Get the README to update the SDK version and find the app file
        readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
        with open(readme_path, "r", encoding="utf-8") as f:
            readme_content = f.read()
        # Determine the Python file name (defaults to app.py)
        app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
        app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"
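        # For reference, a Space README opens with YAML front matter like
        # this (values illustrative):
        #   ---
        #   title: My Demo
        #   sdk: gradio
        #   sdk_version: 5.9.1
        #   app_file: app.py
        #   ---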
        # Get the Python code
        code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
        with open(code_path, "r", encoding="utf-8") as f:
            original_code = f.read()
    except Exception as e:
        raise gr.Error(f"Error fetching files from {space_id}: {str(e)}")
    # Prepare the prompt
    migration_guide = load_migration_guide()
    system_prompt = (
        "You are an expert Python developer specializing in Gradio. "
        "Your task is to strictly migrate a Gradio 5.x application to Gradio 6.x based on the provided guide.\n\n"
        "### MIGRATION GUIDE ###\n"
        f"{migration_guide}\n\n"
        "### INSTRUCTIONS ###\n"
        "1. Analyze the user's code.\n"
        "2. Apply ALL necessary changes according to the guide (e.g., Blocks parameters, Chatbot tuples to messages, Video return types, API visibility).\n"
        "3. Output ONLY the complete, runnable Python code inside a ```python markdown block.\n"
        "4. Do not include conversational text, only the code."
    )
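    # The model is expected to reply with exactly one fenced block, e.g.
    # (illustrative):
    #   ```python
    #   import gradio as gr
    #   ...  # full migrated app
    #   ```
    # extract_code_block() above relies on this output contract.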
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Migrate this code to Gradio 6:\n\n```python\n{original_code}\n```"},
    ]
    # Call the Moonshot model
    gr.Info(f"Analysing {app_file_name} with moonshotai/Kimi-K2-Thinking...")
    try:
        completion = client.chat.completions.create(
            model="moonshotai/Kimi-K2-Thinking:novita",
            messages=messages,
            temperature=0.1,
            max_tokens=8000,
        )
        llm_response = completion.choices[0].message.content
        migrated_code = extract_code_block(llm_response)
        if not migrated_code:
            raise ValueError("LLM failed to generate a valid Python code block.")
    except Exception as e:
        raise gr.Error(f"LLM processing failed: {str(e)}")
    # Prepare the commit: bump the SDK version in the README to 6.0.0
    new_readme_content = re.sub(
        r"sdk_version:.*",
        "sdk_version: 6.0.0",
        readme_content
    )
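    # e.g. "sdk_version: 5.38.0" becomes "sdk_version: 6.0.0"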
    operations = [
        CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=new_readme_content.encode("utf-8")),
        CommitOperationAdd(path_in_repo=app_file_name, path_or_fileobj=migrated_code.encode("utf-8")),
    ]
    pr_title = "[AUTOMATED] Migration to Gradio 6.0"
    pr_description = (
        "This PR migrates the Space to Gradio 6.0.\n\n"
        "### Changes\n"
        "- `README.md`: Updated `sdk_version` to `6.0.0`\n"
        f"- `{app_file_name}`: Automated refactoring using `moonshotai/Kimi-K2-Thinking` based on the migration guide."
    )
    # Create the PR
    try:
        commit_info = api.create_commit(
            repo_id=space_id,
            operations=operations,
            commit_message=pr_title,
            commit_description=pr_description,
            repo_type="space",
            create_pr=True,
        )
        return f"## ✅ Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
    except Exception as e:
        raise gr.Error(f"Failed to create Pull Request: {str(e)}")

# --- UI ---
with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
gr.Markdown("# 🚀 Gradio 6 Auto-Migrator")
gr.Markdown(
"Enter a Hugging Face Space URL below. This tool will:\n"
"1. Fetch your code.\n"
"2. Use **Kimi-K2-Thinking** to refactor it based on the Official Gradio 6 Migration Guide.\n"
"3. Open a Pull Request on your Space automatically."
)
with gr.Row():
space_input = gr.Textbox(
label="Space URL or ID",
placeholder="username/space-name",
scale=4
)
btn = gr.Button("Migrate Space", variant="primary", scale=1)
output_md = gr.Markdown(label="Status")
btn.click(
fn=migrate_app,
inputs=[space_input],
outputs=output_md
)
if __name__ == "__main__":
    demo.launch()
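
# To run locally (illustrative; "hf_xxx" is a placeholder for a write token):
#   export HF_TOKEN=hf_xxx
#   python app.py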