import gradio as gr
from transformers import pipeline
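
# Assumed Space dependencies (requirements.txt): gradio, transformers, and a
# torch backend for the pipelines. This list is inferred from the imports above;
# the original file does not pin any versions.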
# ===============================
# Load only text-generation models (simpler, stable)
# ===============================
models = {
    "DistilGPT-2": pipeline("text-generation", model="distilgpt2"),
    "GPT2 (Small)": pipeline("text-generation", model="gpt2"),
    "DialoGPT-small": pipeline("text-generation", model="microsoft/DialoGPT-small"),
    "OPT-350M": pipeline("text-generation", model="facebook/opt-350m"),
    "Bloom-560M": pipeline("text-generation", model="bigscience/bloom-560m"),
    "GPT-Neo-125M": pipeline("text-generation", model="EleutherAI/gpt-neo-125M"),
    "Falcon-RW-1B": pipeline("text-generation", model="tiiuae/falcon-rw-1b"),
}
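
# A possible alternative (sketch only, not used by this app): lazy-load each
# pipeline on first request instead of at import time, which keeps Space
# startup fast at the cost of a slow first generation per model.
#
#   from functools import lru_cache
#
#   @lru_cache(maxsize=None)
#   def get_generator(model_id):
#       return pipeline("text-generation", model=model_id)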
def compare_models(user_input, max_new_tokens=100, temperature=0.7, top_p=0.95):
    results = {}
    for name, generator in models.items():
        try:
            output = generator(
                user_input,
                max_new_tokens=int(max_new_tokens),  # sliders can pass floats; generation expects an int
                temperature=temperature,
                top_p=top_p,
                do_sample=True,
            )[0]["generated_text"]
            results[name] = output
        except Exception as e:
            results[name] = f"⚠️ Error: {str(e)}"
    return [results[m] for m in models.keys()]
# ===============================
# Gradio UI
# ===============================
with gr.Blocks(css="style.css") as demo:
    gr.Markdown(
        "## 🤖 Open-Source Model Comparator\n"
        "Compare outputs from multiple open-source LLMs side by side.\n"
        "These are raw, unfiltered outputs from Hugging Face models."
    )

    with gr.Row():
        user_input = gr.Textbox(
            label="Your prompt",
            placeholder="Ask something like 'Write a short poem about the stars'...",
            lines=2,
        )
        generate_btn = gr.Button("Generate", variant="primary")

    with gr.Row():
        max_tokens = gr.Slider(20, 200, value=100, step=10, label="Max new tokens")
        temp = gr.Slider(0.1, 1.0, value=0.7, step=0.1, label="Creativity (temperature)")
        topp = gr.Slider(0.5, 1.0, value=0.95, step=0.05, label="Nucleus sampling (top_p)")

    with gr.Row():
        outputs = [
            gr.Textbox(label=name, elem_classes="output-box", interactive=False)
            for name in models.keys()
        ]

    examples = [
        ["Explain quantum computing in simple terms."],
        ["Write a haiku about autumn leaves."],
        ["What are the pros and cons of nuclear energy?"],
        ["Describe a futuristic city in the year 2200."],
        ["Write a funny short story about a robot learning to cook."],
    ]
    gr.Examples(examples=examples, inputs=[user_input])

    generate_btn.click(compare_models, inputs=[user_input, max_tokens, temp, topp], outputs=outputs)
    user_input.submit(compare_models, inputs=[user_input, max_tokens, temp, topp], outputs=outputs)
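
# ---------------------------------------------------------------
# Optional helper (a minimal sketch, not part of the original Space): runs one
# comparison from the command line without starting the UI. It assumes all
# pipelines above loaded successfully and will be slow on CPU-only hardware.
# ---------------------------------------------------------------
def smoke_test(prompt="Write a haiku about rain.", max_new_tokens=40):
    for name, text in zip(models, compare_models(prompt, max_new_tokens=max_new_tokens)):
        print(f"--- {name} ---\n{text}\n")
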
if __name__ == "__main__":
    demo.launch()