akhaliq (HF Staff) committed
Commit 254f500 · Parent(s): 5db8e53

add opus and remove some models and gradio 6 update

backend_api.py CHANGED
@@ -98,16 +98,14 @@ AVAILABLE_MODELS = [
     {"name": "MiniMax M2", "id": "MiniMaxAI/MiniMax-M2", "description": "MiniMax M2 model via HuggingFace InferenceClient with Novita provider"},
     {"name": "DeepSeek V3.2-Exp", "id": "deepseek-ai/DeepSeek-V3.2-Exp", "description": "DeepSeek V3.2 Experimental via HuggingFace"},
     {"name": "DeepSeek R1", "id": "deepseek-ai/DeepSeek-R1-0528", "description": "DeepSeek R1 model for code generation"},
-    {"name": "GPT-5", "id": "gpt-5", "description": "OpenAI GPT-5 via OpenRouter"},
     {"name": "GPT-5.1", "id": "gpt-5.1", "description": "OpenAI GPT-5.1 model via Poe for advanced code generation and general tasks"},
     {"name": "GPT-5.1 Instant", "id": "gpt-5.1-instant", "description": "OpenAI GPT-5.1 Instant model via Poe for fast responses"},
     {"name": "GPT-5.1 Codex", "id": "gpt-5.1-codex", "description": "OpenAI GPT-5.1 Codex model via Poe optimized for code generation"},
+    {"name": "Claude-Opus-4.5", "id": "claude-opus-4.5", "description": "Anthropic Claude Opus 4.5 via Poe (OpenAI-compatible)"},
     {"name": "Claude-Sonnet-4.5", "id": "claude-sonnet-4.5", "description": "Anthropic Claude Sonnet 4.5 via Poe (OpenAI-compatible)"},
     {"name": "Claude-Haiku-4.5", "id": "claude-haiku-4.5", "description": "Anthropic Claude Haiku 4.5 via Poe (OpenAI-compatible)"},
     {"name": "Kimi K2 Thinking", "id": "moonshotai/Kimi-K2-Thinking", "description": "Moonshot Kimi K2 Thinking model via HuggingFace with Together AI provider"},
     {"name": "GLM-4.6", "id": "zai-org/GLM-4.6", "description": "GLM-4.6 model via HuggingFace with Cerebras provider"},
-    {"name": "Gemini Flash Latest", "id": "gemini-flash-latest", "description": "Google Gemini Flash via OpenRouter"},
-    {"name": "Qwen3 Max Preview", "id": "qwen3-max-preview", "description": "Qwen3 Max Preview via DashScope API"},
 ]
 
 # Cache model lookup for faster access (built after AVAILABLE_MODELS is defined)
@@ -194,7 +192,7 @@ async def startup_event():
 class CodeGenerationRequest(BaseModel):
     query: str
     language: str = "html"
-    model_id: str = "MiniMaxAI/MiniMax-M2"
+    model_id: str = "claude-opus-4.5"
     provider: str = "auto"
     history: List[List[str]] = []
     agent_mode: bool = False
@@ -1285,7 +1283,7 @@ async def websocket_generate(websocket: WebSocket):
 
     query = data.get("query")
     language = data.get("language", "html")
-    model_id = data.get("model_id", "MiniMaxAI/MiniMax-M2")
+    model_id = data.get("model_id", "claude-opus-4.5")
 
     # Send acknowledgment
     await websocket.send_json({
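
Note: the trimmed registry feeds the cached lookup mentioned in the hunk above, and the new Claude Opus entry becomes the default for both the request model and the WebSocket path. A minimal sketch of how these pieces might fit together; the MODEL_LOOKUP name and the two-entry list are illustrative assumptions, not code from this commit:

```python
from typing import List
from pydantic import BaseModel

# Illustrative two-entry registry (the real AVAILABLE_MODELS has many more entries)
AVAILABLE_MODELS = [
    {"name": "Claude-Opus-4.5", "id": "claude-opus-4.5",
     "description": "Anthropic Claude Opus 4.5 via Poe (OpenAI-compatible)"},
    {"name": "GLM-4.6", "id": "zai-org/GLM-4.6",
     "description": "GLM-4.6 model via HuggingFace with Cerebras provider"},
]

# Cache model lookup for faster access, keyed by model id (assumed shape)
MODEL_LOOKUP = {m["id"]: m for m in AVAILABLE_MODELS}

class CodeGenerationRequest(BaseModel):
    query: str
    language: str = "html"
    model_id: str = "claude-opus-4.5"  # new default introduced by this commit
    provider: str = "auto"
    history: List[List[str]] = []
    agent_mode: bool = False

# A request that omits model_id now resolves to Claude Opus 4.5
req = CodeGenerationRequest(query="Build a todo app")
print(MODEL_LOOKUP[req.model_id]["name"])  # -> Claude-Opus-4.5
```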
backend_docs_manager.py CHANGED
@@ -249,7 +249,46 @@ def build_gradio_system_prompt() -> str:
     docs_content = get_gradio_docs_content()
 
     # Base system prompt with anycoder-specific instructions
-    base_prompt = """You are an expert Gradio developer. Create a complete, working Gradio application based on the user's request. Generate all necessary code to make the application functional and runnable.
+    base_prompt = """🚨 CRITICAL: You are an expert Gradio 6 developer. You MUST use Gradio 6 syntax and API.
+
+## Key Gradio 6 Changes (MUST FOLLOW):
+- Use `footer_links` parameter instead of removed `show_api` in gr.Blocks()
+- Use `api_visibility` instead of `api_name` in event listeners
+- Use modern Gradio 6 component syntax (check documentation below)
+- Gradio 6 has updated component APIs - always refer to the documentation below
+- DO NOT use deprecated Gradio 5 or older syntax
+
+Create a complete, working Gradio 6 application based on the user's request. Generate all necessary code to make the application functional and runnable.
+
+## Gradio 6 Example (Your Code Should Follow This Pattern):
+
+```python
+import gradio as gr
+
+def process(text):
+    return f"Processed: {text}"
+
+# Gradio 6 Blocks with footer_links (NOT show_api)
+with gr.Blocks(
+    title="My App",
+    footer_links=[{"label": "Built with anycoder", "url": "https://huggingface.co/spaces/akhaliq/anycoder"}]
+) as demo:
+    with gr.Row():
+        input_text = gr.Textbox(label="Input")
+        output_text = gr.Textbox(label="Output")
+
+    btn = gr.Button("Process")
+
+    # Gradio 6 events use api_visibility (NOT just api_name)
+    btn.click(
+        fn=process,
+        inputs=[input_text],
+        outputs=[output_text],
+        api_visibility="public"  # Gradio 6 syntax
+    )
+
+demo.launch()
+```
 
 ## Multi-File Application Structure
 
@@ -304,13 +343,21 @@ Below is the complete, official Gradio 6 documentation automatically synced from
 
 ---
 
-## Final Instructions
-
-- Always use the exact function signatures and patterns from the Gradio 6 documentation above
-- Follow Gradio 6 migration guidelines if you're familiar with older versions
-- Use modern Gradio 6 API patterns (e.g., footer_links instead of show_api, api_visibility instead of show_api in events)
-- Generate production-ready code that follows all best practices
-- Always include the "Built with anycoder" attribution in the header
+## 🚨 CRITICAL FINAL INSTRUCTIONS - GRADIO 6 ONLY
+
+YOU MUST USE GRADIO 6 SYNTAX. This is MANDATORY:
+
+1. **ONLY use Gradio 6 API** - Do NOT use Gradio 5 or older syntax
+2. **Reference the documentation above** - All function signatures and patterns are from Gradio 6
+3. **Use modern Gradio 6 patterns:**
+   - Use `footer_links` parameter in gr.Blocks() (NOT show_api)
+   - Use `api_visibility` in event listeners (NOT api_name alone)
+   - Use updated component syntax from Gradio 6 documentation
+4. **Follow Gradio 6 migration guide** if you see any deprecated patterns
+5. **Generate production-ready Gradio 6 code** that follows all best practices
+6. **Always include "Built with anycoder"** as clickable text in the header linking to https://huggingface.co/spaces/akhaliq/anycoder
+
+REMINDER: You are writing Gradio 6 code. Double-check all syntax against the Gradio 6 documentation provided above.
 
 """
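
Note: the rewritten prompt builder pairs this hard-line Gradio 6 preamble with the synced documentation; the exact concatenation lives outside this hunk, so the sketch below stubs get_gradio_docs_content() and abridges the base prompt. Treat the composition as an assumption:

```python
def get_gradio_docs_content() -> str:
    # Stand-in for the Gradio 6 documentation that backend_docs_manager syncs
    return "## Gradio 6 Documentation\n(full reference omitted here)"

def build_gradio_system_prompt() -> str:
    docs_content = get_gradio_docs_content()

    # Abridged version of the base prompt added in this commit
    base_prompt = (
        "🚨 CRITICAL: You are an expert Gradio 6 developer. "
        "You MUST use Gradio 6 syntax and API.\n\n"
    )

    # Assumed composition: base instructions, then the synced docs,
    # then the final Gradio 6 reminder
    return base_prompt + docs_content + "\n\nREMINDER: You are writing Gradio 6 code."

print(build_gradio_system_prompt().splitlines()[0])
```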
 
backend_models.py CHANGED
@@ -50,13 +50,6 @@ def get_inference_client(model_id: str, provider: str = "auto"):
             base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
         )
 
-    elif model_id == "gpt-5":
-        # Use Poe (OpenAI-compatible) client for GPT-5 model
-        return OpenAI(
-            api_key=os.getenv("POE_API_KEY"),
-            base_url="https://api.poe.com/v1"
-        )
-
     elif model_id == "gpt-5.1":
         # Use Poe (OpenAI-compatible) client for GPT-5.1 model
         return OpenAI(
@@ -106,6 +99,13 @@ def get_inference_client(model_id: str, provider: str = "auto"):
             base_url="https://api.poe.com/v1"
         )
 
+    elif model_id == "claude-opus-4.5":
+        # Use Poe (OpenAI-compatible) client for Claude-Opus-4.5
+        return OpenAI(
+            api_key=os.getenv("POE_API_KEY"),
+            base_url="https://api.poe.com/v1"
+        )
+
     elif model_id == "claude-sonnet-4.5":
         # Use Poe (OpenAI-compatible) client for Claude-Sonnet-4.5
         return OpenAI(
@@ -120,13 +120,6 @@ def get_inference_client(model_id: str, provider: str = "auto"):
             base_url="https://api.poe.com/v1"
         )
 
-    elif model_id == "qwen3-max-preview":
-        # Use DashScope International OpenAI client for Qwen3 Max Preview
-        return OpenAI(
-            api_key=os.getenv("DASHSCOPE_API_KEY"),
-            base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
-        )
-
     elif model_id == "x-ai/grok-4.1-fast":
         # Use OpenRouter client for Grok 4.1 Fast model
         return OpenAI(
@@ -174,13 +167,6 @@ def get_inference_client(model_id: str, provider: str = "auto"):
             base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
         )
 
-    elif model_id == "gemini-flash-latest":
-        # Use Google Gemini Flash Latest (OpenAI-compatible) client
-        return OpenAI(
-            api_key=os.getenv("GEMINI_API_KEY"),
-            base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
-        )
-
     elif model_id == "gemini-flash-lite-latest":
         # Use Google Gemini Flash Lite Latest (OpenAI-compatible) client
         return OpenAI(
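
Note: the net effect on the client factory is that gpt-5, qwen3-max-preview, and gemini-flash-latest no longer have branches, while claude-opus-4.5 joins the other Poe-backed models on the same OpenAI-compatible endpoint. A condensed sketch of the dispatch; the HuggingFace fallback at the end is an assumption, not the function's real tail:

```python
import os
from openai import OpenAI
from huggingface_hub import InferenceClient

# Model ids that this commit's branches route through Poe's OpenAI-compatible API
POE_MODELS = {
    "claude-opus-4.5", "claude-sonnet-4.5", "claude-haiku-4.5",
    "gpt-5.1", "gpt-5.1-instant", "gpt-5.1-codex",
}

def get_inference_client(model_id: str, provider: str = "auto"):
    if model_id in POE_MODELS:
        # Same pattern as the claude-opus-4.5 branch added above
        return OpenAI(
            api_key=os.getenv("POE_API_KEY"),
            base_url="https://api.poe.com/v1",
        )
    # Assumed fallback: hub-hosted models (MiniMax M2, GLM-4.6, ...) go through
    # the HuggingFace InferenceClient with the requested provider
    return InferenceClient(model=model_id, provider=provider, token=os.getenv("HF_TOKEN"))

# Usage sketch: the returned Poe client speaks the standard chat.completions interface
if os.getenv("POE_API_KEY"):
    client = get_inference_client("claude-opus-4.5")
    completion = client.chat.completions.create(
        model="claude-opus-4.5",
        messages=[{"role": "user", "content": "Say hello in one line."}],
    )
    print(completion.choices[0].message.content)
```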
frontend/src/app/page.tsx CHANGED
@@ -17,7 +17,7 @@ export default function Home() {
 
   const [generatedCode, setGeneratedCode] = useState('');
   const [selectedLanguage, setSelectedLanguage] = useState<Language>('html');
-  const [selectedModel, setSelectedModel] = useState('zai-org/GLM-4.6');
+  const [selectedModel, setSelectedModel] = useState('claude-opus-4.5');
   const [isGenerating, setIsGenerating] = useState(false);
   const [isAuthenticated, setIsAuthenticated] = useState(false);
   const [currentRepoId, setCurrentRepoId] = useState<string | null>(null); // Track imported/deployed space
frontend/src/components/LandingPage.tsx CHANGED
@@ -28,7 +28,7 @@ export default function LandingPage({
   onImport,
   isAuthenticated,
   initialLanguage = 'html',
-  initialModel = 'zai-org/GLM-4.6',
+  initialModel = 'claude-opus-4.5',
   onAuthChange
 }: LandingPageProps) {
   const [prompt, setPrompt] = useState('');