multimodalart HF Staff commited on
Commit
d5aea99
·
verified ·
1 Parent(s): 0c5f6d7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -21
app.py CHANGED
@@ -6,9 +6,6 @@ from huggingface_hub import InferenceClient, HfApi, CommitOperationAdd
6
  # 1. Load Environment Token
7
  HF_TOKEN = os.environ.get("HF_TOKEN")
8
 
9
- if not HF_TOKEN:
10
- raise ValueError("HF_TOKEN environment variable is not set. Please add it to your Space settings.")
11
-
12
  def load_migration_guide():
13
  """Loads the migration context from the external txt file."""
14
  try:
@@ -30,31 +27,47 @@ def extract_code_block(text: str) -> str:
30
 
31
  return ""
32
 
33
- def migrate_app(space_url):
34
  """
35
- 1. Fetches Space metadata and code using Env Token.
36
- 2. Sends code + migration guide to LLM.
37
- 3. Creates a PR on the Space.
38
  """
 
 
 
 
39
  if not space_url:
40
- raise gr.Error("Please enter a Space URL.")
 
 
 
 
 
 
 
 
 
41
 
42
- # Setup Clients using the Environment Token
 
 
 
43
  api = HfApi(token=HF_TOKEN)
44
  client = InferenceClient(api_key=HF_TOKEN)
45
 
46
  # Verify Auth
47
  try:
48
  user = api.whoami()
49
- print(f"Authenticated as {user['name']}")
50
  except Exception as e:
51
- raise gr.Error(f"Authentication failed using HF_TOKEN: {str(e)}")
 
52
 
53
  # Parse Space ID
54
  space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")
55
 
56
  try:
57
- # Get README to update SDK version and find app file
 
58
  readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
59
  with open(readme_path, "r", encoding="utf-8") as f:
60
  readme_content = f.read()
@@ -62,14 +75,18 @@ def migrate_app(space_url):
62
  # Determine python file name (default to app.py)
63
  app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
64
  app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"
 
65
 
66
  # Get Python Code
 
67
  code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
68
  with open(code_path, "r", encoding="utf-8") as f:
69
  original_code = f.read()
 
70
 
71
  except Exception as e:
72
- raise gr.Error(f"Error fetching files from {space_id}: {str(e)}")
 
73
 
74
  # Prepare Prompt
75
  migration_guide = load_migration_guide()
@@ -92,7 +109,7 @@ def migrate_app(space_url):
92
  ]
93
 
94
  # Call Moonshot Model
95
- gr.Info(f"Analysing {app_file_name} with moonshotai/Kimi-K2-Thinking...")
96
 
97
  try:
98
  completion = client.chat.completions.create(
@@ -105,13 +122,20 @@ def migrate_app(space_url):
105
  migrated_code = extract_code_block(llm_response)
106
 
107
  if not migrated_code:
108
- raise ValueError("LLM failed to generate valid Python code block.")
 
 
 
109
 
110
  except Exception as e:
111
- raise gr.Error(f"LLM Processing failed: {str(e)}")
 
112
 
113
  # Prepare Commit
 
 
114
  # Update SDK version in README to 6.0.0
 
115
  new_readme_content = re.sub(
116
  r"sdk_version:.*",
117
  "sdk_version: 6.0.0",
@@ -141,9 +165,9 @@ def migrate_app(space_url):
141
  repo_type="space",
142
  create_pr=True
143
  )
144
- return f"## βœ… Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
145
  except Exception as e:
146
- raise gr.Error(f"Failed to create Pull Request: {str(e)}")
147
 
148
  # --- UI ---
149
  with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
@@ -163,12 +187,21 @@ with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
163
  )
164
  btn = gr.Button("Migrate Space", variant="primary", scale=1)
165
 
166
- output_md = gr.Markdown(label="Status")
 
 
 
 
 
 
 
 
 
167
 
168
  btn.click(
169
- fn=migrate_app,
170
  inputs=[space_input],
171
- outputs=output_md
172
  )
173
 
174
  if __name__ == "__main__":
 
6
  # 1. Load Environment Token
7
  HF_TOKEN = os.environ.get("HF_TOKEN")
8
 
 
 
 
9
  def load_migration_guide():
10
  """Loads the migration context from the external txt file."""
11
  try:
 
27
 
28
  return ""
29
 
30
+ def migrate_app_generator(space_url):
31
  """
32
+ Generator function that streams logs to a textbox and the final result to markdown.
 
 
33
  """
34
+ if not HF_TOKEN:
35
+ yield "❌ Error: HF_TOKEN environment variable is not set.", ""
36
+ return
37
+
38
  if not space_url:
39
+ yield "❌ Error: Please enter a Space URL.", ""
40
+ return
41
+
42
+ log_buffer = []
43
+
44
+ def log(message):
45
+ """Helper to append to log buffer and return joined string."""
46
+ print(message) # Console log
47
+ log_buffer.append(f"[{len(log_buffer)+1}] {message}")
48
+ return "\n".join(log_buffer)
49
 
50
+ # 1. Initialization
51
+ yield log(f"πŸš€ Starting migration for: {space_url}"), ""
52
+
53
+ # Setup Clients
54
  api = HfApi(token=HF_TOKEN)
55
  client = InferenceClient(api_key=HF_TOKEN)
56
 
57
  # Verify Auth
58
  try:
59
  user = api.whoami()
60
+ yield log(f"βœ… Authenticated as: {user['name']}"), ""
61
  except Exception as e:
62
+ yield log(f"❌ Authentication failed: {str(e)}"), ""
63
+ return
64
 
65
  # Parse Space ID
66
  space_id = space_url.replace("https://huggingface.co/spaces/", "").strip("/")
67
 
68
  try:
69
+ # Get README
70
+ yield log(f"πŸ“₯ Fetching README.md from {space_id}..."), ""
71
  readme_path = api.hf_hub_download(repo_id=space_id, filename="README.md", repo_type="space")
72
  with open(readme_path, "r", encoding="utf-8") as f:
73
  readme_content = f.read()
 
75
  # Determine python file name (default to app.py)
76
  app_file_match = re.search(r"app_file:\s*(.*)", readme_content)
77
  app_file_name = app_file_match.group(1).strip() if app_file_match else "app.py"
78
+ yield log(f"πŸ“„ Identified app file: {app_file_name}"), ""
79
 
80
  # Get Python Code
81
+ yield log(f"πŸ“₯ Fetching {app_file_name}..."), ""
82
  code_path = api.hf_hub_download(repo_id=space_id, filename=app_file_name, repo_type="space")
83
  with open(code_path, "r", encoding="utf-8") as f:
84
  original_code = f.read()
85
+ yield log(f"βœ… Code fetched successfully ({len(original_code)} chars)."), ""
86
 
87
  except Exception as e:
88
+ yield log(f"❌ Error fetching files: {str(e)}"), ""
89
+ return
90
 
91
  # Prepare Prompt
92
  migration_guide = load_migration_guide()
 
109
  ]
110
 
111
  # Call Moonshot Model
112
+ yield log("🧠 Analyzing code with moonshotai/Kimi-K2-Thinking (this may take 30-60s)..."), ""
113
 
114
  try:
115
  completion = client.chat.completions.create(
 
122
  migrated_code = extract_code_block(llm_response)
123
 
124
  if not migrated_code:
125
+ yield log("❌ LLM failed to generate valid Python code block."), ""
126
+ return
127
+
128
+ yield log("✨ Code successfully migrated by LLM."), ""
129
 
130
  except Exception as e:
131
+ yield log(f"❌ LLM Processing failed: {str(e)}"), ""
132
+ return
133
 
134
  # Prepare Commit
135
+ yield log("πŸ“¦ preparing Pull Request..."), ""
136
+
137
  # Update SDK version in README to 6.0.0
138
+ # Uses regex to replace sdk_version line
139
  new_readme_content = re.sub(
140
  r"sdk_version:.*",
141
  "sdk_version: 6.0.0",
 
165
  repo_type="space",
166
  create_pr=True
167
  )
168
+ yield log("πŸŽ‰ Done!"), f"## βœ… Success!\n\nPull Request created: [**{commit_info.pr_url}**]({commit_info.pr_url})"
169
  except Exception as e:
170
+ yield log(f"❌ Failed to create Pull Request: {str(e)}"), ""
171
 
172
  # --- UI ---
173
  with gr.Blocks(title="Gradio 6 Auto-Migrator") as demo:
 
187
  )
188
  btn = gr.Button("Migrate Space", variant="primary", scale=1)
189
 
190
+ with gr.Row():
191
+ with gr.Column(scale=1):
192
+ log_output = gr.Textbox(
193
+ label="Execution Log",
194
+ lines=10,
195
+ interactive=False,
196
+ autoscroll=True
197
+ )
198
+ with gr.Column(scale=1):
199
+ result_output = gr.Markdown(label="Result")
200
 
201
  btn.click(
202
+ fn=migrate_app_generator,
203
  inputs=[space_input],
204
+ outputs=[log_output, result_output]
205
  )
206
 
207
  if __name__ == "__main__":