import gradio as gr
import os
import requests

# Hugging Face API config
HF_API_TOKEN = os.getenv("HF_TOKEN")  # Set this in Hugging Face Secrets
MODEL_ID = "rohitnagareddy/Qwen3-0.6B-Coding-Finetuned-v1"
API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}

def query_hf_api(prompt):
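    # Call the hosted Inference API; a successful response is a list of {"generated_text": ...} dicts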
    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt}, timeout=120)
    if response.status_code == 200:
        return response.json()[0]["generated_text"]
    return f"[Error] {response.status_code}: {response.text}"

def chat_fn(prompt, chat_history):
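    # Append the (user, assistant) exchange and return it twice: once for the Chatbot, once for the State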
    response = query_hf_api(prompt)
    chat_history.append((prompt, response))
    return chat_history, chat_history

def download_chat(history):
    # gr.File expects a filepath, so write the transcript to disk and return its path
    path = "chat.txt"
    with open(path, "w", encoding="utf-8") as f:
        f.write("\n\n".join(f"You: {user}\nAI: {bot}" for user, bot in history))
    return path

with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.Markdown("# 🧠 Qwen3 Coding Chatbot")
    with gr.Row():
        clear = gr.Button("🧹 Clear Chat")
        download = gr.Button("⬇️ Download Chat")
    
    chat = gr.Chatbot(label="Qwen3 Chat")
    msg = gr.Textbox(label="Your message")
    submit = gr.Button("🚀 Send")
    download_file = gr.File(label="Chat transcript")  # receives the path returned by download_chat
    history = gr.State([])

    submit.click(chat_fn, [msg, history], [chat, history])
    msg.submit(chat_fn, [msg, history], [chat, history])
    clear.click(lambda: ([], []), None, [chat, history])
    download.click(download_chat, [history], [download_file])

demo.launch(auth=[("Admin", "Password")])  # 🔒 Change this