File size: 7,363 Bytes
02d2af6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d5aea99
02d2af6
d5aea99
02d2af6
d5aea99
 
 
 
02d2af6
d5aea99
 
 
 
cd38fda
d5aea99
 
 
 
 
02d2af6
d5aea99
 
 
 
02d2af6
 
 
 
 
 
d5aea99
02d2af6
d5aea99
 
02d2af6
 
 
 
 
d5aea99
 
02d2af6
 
 
 
 
 
 
d5aea99
02d2af6
 
d5aea99
02d2af6
 
 
d5aea99
02d2af6
 
d5aea99
 
02d2af6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
eef328d
02d2af6
 
 
cd38fda
02d2af6
 
cd38fda
02d2af6
 
 
 
 
d5aea99
 
 
 
02d2af6
 
d5aea99
 
02d2af6
 
d5aea99
 
02d2af6
d5aea99
02d2af6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
eef328d
02d2af6
 
 
 
 
 
 
 
 
 
 
 
d5aea99
02d2af6
d5aea99
02d2af6
 
 
6a94212
 
02d2af6
 
 
 
 
 
 
 
d5aea99
 
 
 
 
 
 
 
 
953028d
b116c1f
02d2af6
 
d5aea99
02d2af6
d5aea99
02d2af6
6a94212
c6e1752
953028d
 
 
02d2af6
6a94212
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
import os
import re
import gradio as gr
from huggingface_hub import InferenceClient, HfApi, CommitOperationAdd

# 1. Load Environment Token
# Single token used for both Hub API calls (whoami, downloads, create_commit)
# and the InferenceClient; migrate_app_generator aborts early when it is unset.
HF_TOKEN = os.environ.get("HF_TOKEN")

def load_migration_guide():
    """Return the Gradio 5→6 migration guide text used to prompt the LLM.

    Reads ``migration_guide.txt`` from the working directory; when the file
    is absent, returns a human-readable error string instead of raising so
    the caller can embed it in the prompt/log without special-casing.
    """
    guide_path = "migration_guide.txt"
    try:
        with open(guide_path, "r", encoding="utf-8") as guide_file:
            content = guide_file.read()
    except FileNotFoundError:
        return "Error: migration_guide.txt not found. Please ensure it is in the same directory."
    return content

def extract_code_block(text: str) -> str:
    """Extract Python source code from an LLM markdown response.

    Tries, in order:
      1. A fenced ```python block (the format the prompt asks for).
      2. Any fenced ``` block — models sometimes omit or vary the
         language tag, which previously made extraction fail.
      3. The raw text as-is, if it already looks like a Gradio app.

    Args:
        text: Raw LLM completion text.

    Returns:
        The extracted code, or an empty string when no code is found.
    """
    # Preferred: an explicitly python-tagged fence.
    match = re.search(r"```python\s*(.*?)\s*```", text, re.DOTALL)
    if match:
        return match.group(1)

    # Fallback 1: a fence with a missing or different language tag.
    match = re.search(r"```[a-zA-Z]*\s*(.*?)\s*```", text, re.DOTALL)
    if match:
        return match.group(1)

    # Fallback 2: the model returned bare code without markdown fences.
    if "import gradio" in text:
        return text

    return ""

def migrate_app_generator(space_url):
    """
    Generator function that streams logs to a textbox and the final result to markdown.
    """
    if not HF_TOKEN:
        yield "❌ Error: HF_TOKEN environment variable is not set.", ""
        return

    if not space_url:
        yield "❌ Error: Please enter a Space URL.", ""
        return

    log_buffer = []
    
    def log(message):
        """Helper to append to log buffer and return joined string."""
        print(message) # Console log
        log_buffer.append(f"[{len(log_buffer)+1}] {message}")
        return "\n".join(log_buffer)

    # 1. Initialization
    yield log(f"πŸš€ Starting migration for: {space_url}"), ""

    # Setup Clients
    api = HfApi(token=HF_TOKEN)
    client = InferenceClient(api_key=HF_TOKEN)
    
    # Verify Auth
    try:
        user = api.whoami()
        yield log(f"βœ… Authenticated as: {user['name']}"), ""
    except Exception as e:
        yield log(f"❌ Authentication failed: {str(e)}"), ""
        return

    # Parse Space ID
    space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")
    
    try:
        # Get README
        yield log(f"πŸ“₯ Fetching README.md from {space_id}..."), ""
        readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
        with open(readme_path, "r", encoding="utf-8") as f:
            readme_content = f.read()
            
        # Determine python file name (default to app.py)
        app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
        app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"
        yield log(f"πŸ“„ Identified app file: {app_file_name}"), ""
        
        # Get Python Code
        yield log(f"πŸ“₯ Fetching {app_file_name}..."), ""
        code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
        with open(code_path, "r", encoding="utf-8") as f:
            original_code = f.read()
        yield log(f"βœ… Code fetched successfully ({len(original_code)} chars)."), ""
            
    except Exception as e:
        yield log(f"❌ Error fetching files: {str(e)}"), ""
        return

    # Prepare Prompt
    migration_guide = load_migration_guide()
    
    system_prompt = (
        "You are an expert Python developer specializing in Gradio. "
        "Your task is to strictly migrate a Gradio 5.x application to Gradio 6.x based on the provided guide.\n\n"
        "### MIGRATION GUIDE ###\n"
        f"{migration_guide}\n\n"
        "### INSTRUCTIONS ###\n"
        "1. Analyze the user's code.\n"
        "2. Apply ALL necessary changes according to the guide (e.g., Blocks parameters, Chatbot tuples to messages, Video return types, API visibility).\n"
        "3. Output ONLY the complete, runnable Python code inside a ```python markdown block.\n"
        "4. Do not include conversational text, only the code."
    )

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Migrate this code to Gradio 6:\n\n```python\n{original_code}\n```"}
    ]

    # Call Moonshot Model
    yield log("🧠 Analyzing code with zai-org/GLM-4.6 (this may take ~30s)..."), ""
    
    try:
        completion = client.chat.completions.create(
            model="zai-org/GLM-4.6:cerebras",
            messages=messages,
            temperature=0.1,
            max_tokens=64000, 
        )
        llm_response = completion.choices[0].message.content
        migrated_code = extract_code_block(llm_response)
        
        if not migrated_code:
            yield log("❌ LLM failed to generate valid Python code block."), ""
            return
            
        yield log("✨ Code successfully migrated by LLM."), ""
            
    except Exception as e:
        yield log(f"❌ LLM Processing failed: {str(e)}"), ""
        return

    # Prepare Commit
    yield log("πŸ“¦ preparing Pull Request..."), ""
    
    # Update SDK version in README to 6.0.0
    # Uses regex to replace sdk_version line
    new_readme_content = re.sub(
        r"sdk_version:.*", 
        "sdk_version: 6.0.0", 
        readme_content
    )
    
    operations = [
        CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=new_readme_content.encode('utf-8')),
        CommitOperationAdd(path_in_repo=app_file_name, path_or_fileobj=migrated_code.encode('utf-8')),
    ]

    pr_title = "[AUTOMATED] Migration to Gradio 6.0"
    pr_description = (
        "This PR migrates the Space to Gradio 6.0.\n\n"
        "### Changes\n"
        "- `README.md`: Updated `sdk_version` to `6.0.0`\n"
        f"- `{app_file_name}`: Automated refactoring using `zai-org/GLM-4.6` based on the migration guide."
    )

    # Create PR
    try:
        commit_info = api.create_commit(
            repo_id=space_id,
            operations=operations,
            commit_message=pr_title,
            commit_description=pr_description,
            repo_type="space",
            create_pr=True
        )
        yield log("πŸŽ‰ Done!"), f"## βœ… Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
    except Exception as e:
        yield log(f"❌ Failed to create Pull Request: {str(e)}"), ""

# --- UI ---
with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
    gr.Markdown("# Gradio 6 Migration Tool πŸ’»")
    gr.Markdown("Migrate your Hugging Face Spaces from Gradio 5.x to Gradio 6.x ")
    with gr.Row():
        space_input = gr.Textbox(
            label="Space URL or ID", 
            placeholder="username/space-name",
            scale=4
        )
        btn = gr.Button("Migrate Space", variant="primary", scale=1)
    
    with gr.Row():
        with gr.Column(scale=1):
            log_output = gr.Textbox(
                label="Execution Log", 
                lines=10, 
                interactive=False,
                autoscroll=True
            )
        with gr.Column(scale=1):
            with gr.Accordion("Output", open=True, elem_id="output"):
                result_output = gr.Markdown(label="Result")

    btn.click(
        fn=migrate_app_generator, 
        inputs=[space_input], 
        outputs=[log_output, result_output]
    )
    
# Custom CSS: cap the app width, stretch the button and the #output accordion.
css = '''.gradio-container .app { max-width: 900px !important; margin: 0 auto; }
button{align-self: stretch}
#output{height: 100%; align-self: stretch;}
'''
if __name__ == "__main__":
    # NOTE(review): css is passed to launch() rather than gr.Blocks(css=...);
    # confirm the installed Gradio version accepts a `css` kwarg on launch.
    demo.launch(css=css)