import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
MODEL_ID = "Lucid-research/lucentcode-1-py" # Change this to your model repo ID
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
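
# Prompt template: the app wraps the user's text in an instruction/output layout
# ("### Instruction:" ... "### Output:"), as used in format_prompt below.
# Assumed to match the model's fine-tuning format; adjust if your fine-tune used a different template.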
def format_prompt(user_input):
    """Wrap the user's instruction in the instruction/output prompt template."""
    return f"### Instruction:\n{user_input}\n\n### Output:\n"

def generate_code(user_input):
    prompt = format_prompt(user_input)
    inputs = tokenizer(prompt, return_tensors="pt")
    # Sample a completion; nucleus sampling (top_p) with moderate temperature keeps output varied but on-topic.
    outputs = model.generate(
        **inputs,
        max_length=1000,
        temperature=0.7,
        do_sample=True,
        top_p=0.9,
        pad_token_id=tokenizer.eos_token_id,
    )
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Return only the generated part after "### Output:"
    return text.split("### Output:")[-1].strip()
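
# Minimal Gradio UI: one instruction textbox in, one generated-output textbox out.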
iface = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=4, label="Instruction"),
    outputs=gr.Textbox(lines=8, label="Generated Output"),
    title="Python Generation With LucentCode-1-py",
    description="Enter an instruction and get a generated Python function.",
)
if __name__ == "__main__":
    iface.launch()