import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# 1. Load Your Smart Model (Step 1000)
MODEL_NAME = "himu1780/ai-python-model"
print(f"Loading {MODEL_NAME}...")
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
# 2. Create the Generator Pipeline
generator = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device=-1  # Use CPU
)
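# Optional smoke test (a sketch, left commented out): uncomment to confirm the pipeline
# generates text before wiring up the UI. The exact output depends on the checkpoint.
# print(generator("### Instruction:\nWrite a python function to add two numbers\n\n"
#                 "### Response:\n```python\n", max_new_tokens=50)[0]["generated_text"])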
def chat_function(user_message, history):
    # 3. THE FIX: Force the prompt structure
    # We add "```python" at the end. This TRICKS the model.
    # It thinks: "Oh, I already started writing a code block, I must finish it."
    prompt = f"### Instruction:\n{user_message}\n\n### Response:\n```python\n"
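    # For example, user_message = "add two numbers" expands to:
    #   "### Instruction:\nadd two numbers\n\n### Response:\n```python\n"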
    # 4. Generate with Anti-Loop Settings
    response = generator(
        prompt,
        max_new_tokens=250,      # Give it enough space to write a full function
        temperature=0.4,         # Low temperature = focused logic
        repetition_penalty=1.2,  # Strict penalty to discourage "Hello World" loops
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id
    )
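    # The pipeline returns a list of dicts like [{"generated_text": "<prompt + continuation>"}],
    # so the text we get back still contains the prompt we sent in.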
    # 5. Clean up the output
    full_text = response[0]['generated_text']
    # Isolate just the newly generated code (drop the prompt)
    answer_part = full_text.split("### Response:")[-1].strip()
    # Ensure the formatting looks nice in the chat
    if not answer_part.startswith("```"):
        answer_part = "```python\n" + answer_part
    if answer_part.count("```") < 2:
        answer_part += "\n```"  # Close the code block if the model didn't
    return answer_part
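# Quick local check (hypothetical output, not run on Spaces): calling
#   chat_function("Write a python function to add two numbers", [])
# should return a fenced ```python block with something like "def add(a, b): return a + b".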
# 6. Launch the Chat Interface (Fixed: No theme argument)
demo = gr.ChatInterface(
    fn=chat_function,
    title="🐍 AI Python Graduate (Step 1000)",
    description="I am trained! Ask me to 'Write a function to...' or 'Create a loop...'",
    examples=[
        "Write a python function to add two numbers",
        "Create a loop from 1 to 10",
        "Write a script to calculate the area of a circle"
    ]
)

demo.launch()