|
|
import os |
|
|
import re |
|
|
import gradio as gr |
|
|
from huggingface_hub import InferenceClient, HfApi, CommitOperationAdd |
|
|
|
|
|
|
|
|
# Hugging Face access token, injected via the Space's secrets / environment.
# Used for both hub operations (download, commit, PR) and inference calls.
HF_TOKEN = os.environ.get("HF_TOKEN")

# Fail fast at import time: every downstream call requires authentication,
# so a missing token is unrecoverable and should stop the app immediately.
if not HF_TOKEN:
    raise ValueError("HF_TOKEN environment variable is not set. Please add it to your Space settings.")
|
|
|
|
|
def load_migration_guide():
    """Return the text of migration_guide.txt, used as LLM context.

    Falls back to an explanatory error string when the file is absent, so
    callers can embed the result into the prompt unconditionally.
    """
    guide_path = "migration_guide.txt"
    try:
        with open(guide_path, "r", encoding="utf-8") as guide_file:
            guide_text = guide_file.read()
    except FileNotFoundError:
        return "Error: migration_guide.txt not found. Please ensure it is in the same directory."
    return guide_text
|
|
|
|
|
def extract_code_block(text: str) -> str:
    """Extract Python source code from an LLM markdown response.

    Tries, in order:
      1. A ```python fenced block (language tag matched case-insensitively,
         since models sometimes emit ```Python).
      2. A bare ``` fenced block with no language tag.
      3. The raw text itself, if it already looks like a Gradio app.

    Args:
        text: The full LLM response, possibly containing markdown fences.

    Returns:
        The extracted code, or "" when no code could be found.
    """
    match = re.search(r"```python\s*(.*?)\s*```", text, re.DOTALL | re.IGNORECASE)
    if match:
        return match.group(1)

    # Fallback: an untagged fence (``` followed directly by a newline).
    # Requiring the newline avoids capturing fences tagged with another
    # language (e.g. ```js), which the prompt forbids anyway.
    match = re.search(r"```\s*\n(.*?)\s*```", text, re.DOTALL)
    if match:
        return match.group(1)

    # Last resort: the model may have returned raw code without any fences.
    if "import gradio" in text:
        return text

    return ""
|
|
|
|
|
def migrate_app(space_url: str) -> str:
    """Migrate a Hugging Face Space to Gradio 6 and open a pull request.

    1. Fetches Space metadata and code using Env Token.
    2. Sends code + migration guide to LLM.
    3. Creates a PR on the Space.

    Args:
        space_url: Either a full URL ("https://huggingface.co/spaces/user/name")
            or a bare "user/name" id.

    Returns:
        A markdown string linking to the created pull request.

    Raises:
        gr.Error: On empty input, authentication failure, file-fetch failure,
            LLM failure, or PR-creation failure.
    """
    if not space_url:
        raise gr.Error("Please enter a Space URL.")

    # Both clients authenticate with the Space-level token, not a user OAuth
    # token, so the PR is opened on behalf of the token's owner.
    api = HfApi(token=HF_TOKEN)
    client = InferenceClient(api_key=HF_TOKEN)

    # Verify the token up front so auth problems surface as a clear error
    # instead of failing later mid-migration.
    try:
        user = api.whoami()
        print(f"Authenticated as {user['name']}")
    except Exception as e:
        raise gr.Error(f"Authentication failed using HF_TOKEN: {str(e)}")

    # Normalize to "user/name": stripping the URL prefix is a no-op when the
    # caller already passed a bare id.
    space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")

    try:
        # README.md holds the Space's YAML front matter, including app_file
        # and sdk_version.
        readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
        with open(readme_path, "r", encoding="utf-8") as f:
            readme_content = f.read()

        # The entry-point file name comes from the front matter; Spaces
        # default to app.py when the key is absent.
        app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
        app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"

        code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
        with open(code_path, "r", encoding="utf-8") as f:
            original_code = f.read()

    except Exception as e:
        raise gr.Error(f"Error fetching files from {space_id}: {str(e)}")

    # Inject the full migration guide into the system prompt as context.
    migration_guide = load_migration_guide()

    system_prompt = (
        "You are an expert Python developer specializing in Gradio. "
        "Your task is to strictly migrate a Gradio 5.x application to Gradio 6.x based on the provided guide.\n\n"
        "### MIGRATION GUIDE ###\n"
        f"{migration_guide}\n\n"
        "### INSTRUCTIONS ###\n"
        "1. Analyze the user's code.\n"
        "2. Apply ALL necessary changes according to the guide (e.g., Blocks parameters, Chatbot tuples to messages, Video return types, API visibility).\n"
        "3. Output ONLY the complete, runnable Python code inside a ```python markdown block.\n"
        "4. Do not include conversational text, only the code."
    )

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Migrate this code to Gradio 6:\n\n```python\n{original_code}\n```"}
    ]

    # Surface progress in the UI while the (slow) LLM call runs.
    gr.Info(f"Analysing {app_file_name} with moonshotai/Kimi-K2-Thinking...")

    try:
        # Low temperature keeps the rewrite deterministic and conservative.
        completion = client.chat.completions.create(
            model="moonshotai/Kimi-K2-Thinking:novita",
            messages=messages,
            temperature=0.1,
            max_tokens=8000,
        )
        llm_response = completion.choices[0].message.content
        migrated_code = extract_code_block(llm_response)

        # Treat an unextractable response as a failure rather than
        # committing an empty file.
        if not migrated_code:
            raise ValueError("LLM failed to generate valid Python code block.")

    except Exception as e:
        raise gr.Error(f"LLM Processing failed: {str(e)}")

    # Rewrite every "sdk_version:" line so the Space runtime installs
    # Gradio 6 alongside the migrated code.
    new_readme_content = re.sub(
        r"sdk_version:.*",
        "sdk_version: 6.0.0",
        readme_content
    )

    # One commit containing both files, so README and code stay in sync.
    operations = [
        CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=new_readme_content.encode('utf-8')),
        CommitOperationAdd(path_in_repo=app_file_name, path_or_fileobj=migrated_code.encode('utf-8')),
    ]

    pr_title = "[AUTOMATED] Migration to Gradio 6.0"
    pr_description = (
        "This PR migrates the Space to Gradio 6.0.\n\n"
        "### Changes\n"
        "- `README.md`: Updated `sdk_version` to `6.0.0`\n"
        f"- `{app_file_name}`: Automated refactoring using `moonshotai/Kimi-K2-Thinking` based on the migration guide."
    )

    try:
        # create_pr=True opens a pull request instead of pushing to main,
        # leaving the Space owner in control of merging.
        commit_info = api.create_commit(
            repo_id=space_id,
            operations=operations,
            commit_message=pr_title,
            commit_description=pr_description,
            repo_type="space",
            create_pr=True
        )
        return f"## ✅ Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
    except Exception as e:
        raise gr.Error(f"Failed to create Pull Request: {str(e)}")
|
|
|
|
|
|
|
|
# --- UI definition ---------------------------------------------------------
with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
    gr.Markdown("# 🚀 Gradio 6 Auto-Migrator")
    gr.Markdown(
        "Enter a Hugging Face Space URL below. This tool will:\n"
        "1. Fetch your code.\n"
        "2. Use **Kimi-K2-Thinking** to refactor it based on the Official Gradio 6 Migration Guide.\n"
        "3. Open a Pull Request on your Space automatically."
    )

    with gr.Row():
        # Accepts either a full URL or a bare "username/space-name" id;
        # migrate_app normalizes both forms.
        space_input = gr.Textbox(
            label="Space URL or ID",
            placeholder="username/space-name",
            scale=4
        )
        btn = gr.Button("Migrate Space", variant="primary", scale=1)

    # migrate_app returns a markdown success message; failures surface as
    # gr.Error popups rather than through this component.
    output_md = gr.Markdown(label="Status")

    btn.click(
        fn=migrate_app,
        inputs=[space_input],
        outputs=output_md
    )

if __name__ == "__main__":
    demo.launch()