|
|
import os |
|
|
import re |
|
|
import gradio as gr |
|
|
from huggingface_hub import InferenceClient, HfApi, CommitOperationAdd |
|
|
|
|
|
|
|
|
# Hugging Face access token used for both Hub API calls and LLM inference.
# None when the HF_TOKEN environment variable is unset; the generator below
# checks this at call time and yields an error instead of raising.
HF_TOKEN = os.environ.get("HF_TOKEN")
|
|
|
|
|
def load_migration_guide(path="migration_guide.txt"):
    """Load the Gradio 5 -> 6 migration context that is fed to the LLM prompt.

    Args:
        path: Location of the guide file. Defaults to "migration_guide.txt"
            in the working directory, preserving the original behavior.

    Returns:
        str: The guide text, or a human-readable error string if the file is
        missing. Callers embed the result into the system prompt either way,
        so this function never raises on a missing file.
    """
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()
    except FileNotFoundError:
        return "Error: migration_guide.txt not found. Please ensure it is in the same directory."
|
|
|
|
|
def extract_code_block(text: str) -> str:
    """Pull the Python source out of an LLM markdown reply.

    Prefers a fenced ```python block (leading/trailing whitespace stripped
    by the pattern). If no fence is found but the reply looks like raw
    Gradio code (contains "import gradio"), the whole reply is returned
    verbatim. Otherwise returns "".
    """
    fenced = re.search(r"```python\s*(.*?)\s*```", text, re.DOTALL)
    if fenced is not None:
        return fenced.group(1)
    # Some models skip the markdown fence and answer with bare code.
    return text if "import gradio" in text else ""
|
|
|
|
|
def migrate_app_generator(space_url):
    """Migrate a Hugging Face Space from Gradio 5.x to 6.x and open a PR.

    Generator that yields ``(log_text, result_markdown)`` tuples so the UI
    can stream progress: ``log_text`` accumulates every log line emitted so
    far, while ``result_markdown`` stays empty until the final success yield.

    Args:
        space_url: Full Space URL ("https://huggingface.co/spaces/user/name")
            or a bare "user/name" Space id.

    Yields:
        tuple[str, str]: (execution log so far, final result markdown).

    Fix note: three f-string literals in the original were broken across two
    physical lines (a stray newline inside the literal, likely from emoji
    mojibake), which is a syntax error; they are rejoined on one line each.
    No other runtime string is altered.
    """
    # Fail fast on missing configuration / input before touching the network.
    if not HF_TOKEN:
        yield "β Error: HF_TOKEN environment variable is not set.", ""
        return

    if not space_url:
        yield "β Error: Please enter a Space URL.", ""
        return

    log_buffer = []

    def log(message):
        """Append a numbered line to the log buffer and return the full log."""
        print(message)
        log_buffer.append(f"[{len(log_buffer)+1}] {message}")
        return "\n".join(log_buffer)

    yield log(f"π Starting migration for: {space_url}"), ""

    api = HfApi(token=HF_TOKEN)
    client = InferenceClient(api_key=HF_TOKEN)

    # Verify the token works before doing any repo operations.
    try:
        user = api.whoami()
        yield log(f"β Authenticated as: {user['name']}"), ""
    except Exception as e:
        yield log(f"β Authentication failed: {str(e)}"), ""
        return

    # Accept either a full URL or a bare "user/space" id.
    space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")

    try:
        yield log(f"π₯ Fetching README.md from {space_id}..."), ""
        readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
        with open(readme_path, "r", encoding="utf-8") as f:
            readme_content = f.read()

        # The Space metadata header names the entry-point file; default to app.py.
        app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
        app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"
        yield log(f"π Identified app file: {app_file_name}"), ""

        yield log(f"π₯ Fetching {app_file_name}..."), ""
        code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
        with open(code_path, "r", encoding="utf-8") as f:
            original_code = f.read()
        yield log(f"β Code fetched successfully ({len(original_code)} chars)."), ""

    except Exception as e:
        yield log(f"β Error fetching files: {str(e)}"), ""
        return

    # Build the LLM prompt: migration guide + strict output-format rules.
    migration_guide = load_migration_guide()

    system_prompt = (
        "You are an expert Python developer specializing in Gradio. "
        "Your task is to strictly migrate a Gradio 5.x application to Gradio 6.x based on the provided guide.\n\n"
        "### MIGRATION GUIDE ###\n"
        f"{migration_guide}\n\n"
        "### INSTRUCTIONS ###\n"
        "1. Analyze the user's code.\n"
        "2. Apply ALL necessary changes according to the guide (e.g., Blocks parameters, Chatbot tuples to messages, Video return types, API visibility).\n"
        "3. Output ONLY the complete, runnable Python code inside a ```python markdown block.\n"
        "4. Do not include conversational text, only the code."
    )

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Migrate this code to Gradio 6:\n\n```python\n{original_code}\n```"}
    ]

    yield log("π§ Analyzing code with zai-org/GLM-4.6 (this may take ~30s)..."), ""

    try:
        completion = client.chat.completions.create(
            model="zai-org/GLM-4.6:cerebras",
            messages=messages,
            temperature=0.1,  # near-deterministic output for a mechanical rewrite
            max_tokens=64000,
        )
        llm_response = completion.choices[0].message.content
        migrated_code = extract_code_block(llm_response)

        if not migrated_code:
            yield log("β LLM failed to generate valid Python code block."), ""
            return

        yield log("β¨ Code successfully migrated by LLM."), ""

    except Exception as e:
        yield log(f"β LLM Processing failed: {str(e)}"), ""
        return

    yield log("π¦ preparing Pull Request..."), ""

    # Bump the Space metadata so the runtime actually installs Gradio 6.
    new_readme_content = re.sub(
        r"sdk_version:.*",
        "sdk_version: 6.0.0",
        readme_content
    )

    operations = [
        CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=new_readme_content.encode('utf-8')),
        CommitOperationAdd(path_in_repo=app_file_name, path_or_fileobj=migrated_code.encode('utf-8')),
    ]

    pr_title = "[AUTOMATED] Migration to Gradio 6.0"
    pr_description = (
        "This PR migrates the Space to Gradio 6.0.\n\n"
        "### Changes\n"
        "- `README.md`: Updated `sdk_version` to `6.0.0`\n"
        f"- `{app_file_name}`: Automated refactoring using `zai-org/GLM-4.6` based on the migration guide."
    )

    try:
        commit_info = api.create_commit(
            repo_id=space_id,
            operations=operations,
            commit_message=pr_title,
            commit_description=pr_description,
            repo_type="space",
            create_pr=True  # open a PR instead of pushing straight to main
        )
        yield log("π Done!"), f"## β Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
    except Exception as e:
        yield log(f"β Failed to create Pull Request: {str(e)}"), ""
|
|
|
|
|
|
|
|
# --- UI layout: input row on top, log + result side by side below -----------
with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
    gr.Markdown("# Gradio 6 Migration Tool π»")
    gr.Markdown("Migrate your Hugging Face Spaces from Gradio 5.x to Gradio 6.x ")
    with gr.Row():
        # Accepts either a full Space URL or a bare "user/space" id;
        # migrate_app_generator normalizes both forms.
        space_input = gr.Textbox(
            label="Space URL or ID",
            placeholder="username/space-name",
            scale=4
        )
        btn = gr.Button("Migrate Space", variant="primary", scale=1)

    with gr.Row():
        with gr.Column(scale=1):
            # Read-only textbox that streams the generator's accumulated log.
            log_output = gr.Textbox(
                label="Execution Log",
                lines=10,
                interactive=False,
                autoscroll=True
            )
        with gr.Column(scale=1):
            # elem_id="output" is targeted by the #output rule in the css
            # string defined at the bottom of this file.
            with gr.Accordion("Output", open=True, elem_id="output"):
                result_output = gr.Markdown(label="Result")

    # migrate_app_generator is a generator, so both outputs update live
    # on every yield: (log text, result markdown).
    btn.click(
        fn=migrate_app_generator,
        inputs=[space_input],
        outputs=[log_output, result_output]
    )
|
|
|
|
|
# Layout tweaks: center the app at 900px, stretch the button and the
# #output accordion (see elem_id above) to fill their columns.
css = '''.gradio-container .app { max-width: 900px !important; margin: 0 auto; }
button{align-self: stretch}
#output{height: 100%; align-self: stretch;}
'''

if __name__ == "__main__":
    # NOTE(review): css is passed to launch() rather than gr.Blocks() —
    # this matches the Gradio 6 API where styling moved to launch();
    # confirm against the installed gradio version.
    demo.launch(css=css)