# Stable Diffusion 3.5 Large demo (Hugging Face Space running on ZeroGPU)
import gradio as gr
import numpy as np
import random
import spaces
from diffusers import DiffusionPipeline
import torch
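
# Pick the device and a matching dtype: bfloat16 on GPU keeps the 8B model compact,
# float32 is the CPU fallback.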
device = "cuda" if torch.cuda.is_available() else "cpu"
model_repo_id = "stabilityai/stable-diffusion-3.5-large"

if torch.cuda.is_available():
    torch_dtype = torch.bfloat16
else:
    torch_dtype = torch.float32

pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
pipe = pipe.to(device)

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1024
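
# Generation entry point wired to the UI below; it returns the image together with the
# seed that was actually used, so the seed slider can reflect randomized values.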
@spaces.GPU  # request a ZeroGPU worker for each call; this is what the `spaces` import is for
def infer(
    prompt,
    negative_prompt="",
    seed=42,
    randomize_seed=False,
    width=1024,
    height=1024,
    guidance_scale=4.5,
    num_inference_steps=40,
    progress=gr.Progress(track_tqdm=True),
):
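    # Resolve the seed (optionally randomized), then run the pipeline with an explicitly
    # seeded generator so a given seed reproduces the same image.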
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)

    generator = torch.Generator().manual_seed(seed)

    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]

    return image, seed

# Enhanced examples with creative prompts
examples = [
    "A capybara wearing a suit holding a sign that reads Hello World",
    "A steampunk-style flying ship made of brass and wood, floating through cotton candy clouds",
    "A magical library where books are flying and glowing, with a wise owl librarian",
    "A cyberpunk street food vendor selling neon-colored dumplings in the rain",
    "A group of penguins having a formal tea party in the Antarctic",
    "A treehouse city at sunset with bioluminescent plants and floating lanterns",
]

# Custom CSS with modern styling
css = """
:root {
    --primary-color: #7B2CBF;
    --secondary-color: #9D4EDD;
    --background-color: #10002B;
    --text-color: #E0AAFF;
    --card-bg: #240046;
}

#col-container {
    max-width: 850px !important;
    margin: 0 auto;
    padding: 20px;
    background: var(--background-color);
    border-radius: 15px;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}

.main-title {
    color: var(--text-color) !important;
    text-align: center;
    font-size: 2.5em !important;
    margin-bottom: 1em !important;
    text-shadow: 2px 2px 4px rgba(0, 0, 0, 0.3);
}

.gradio-container {
    background: var(--background-color) !important;
    color: var(--text-color) !important;
}

.gr-button {
    background: var(--primary-color) !important;
    border: none !important;
    color: white !important;
    transition: transform 0.2s !important;
}

.gr-button:hover {
    transform: translateY(-2px) !important;
    background: var(--secondary-color) !important;
}

.gr-input, .gr-box {
    background: var(--card-bg) !important;
    border: 1px solid var(--primary-color) !important;
    color: var(--text-color) !important;
}

.footer-custom a {
    color: var(--text-color);
    text-decoration: none;
    margin: 0 10px;
    transition: color 0.3s;
}

.footer-custom a:hover {
    color: var(--secondary-color);
    text-decoration: underline;
}
"""

# Footer HTML
footer = """
<div class="footer-custom" style="text-align: center; margin-top: 20px; color: #f8f8f2;">
    <a href="https://www.linkedin.com/in/pejman-ebrahimi-4a60151a7/" target="_blank">LinkedIn</a> |
    <a href="https://github.com/arad1367" target="_blank">GitHub</a> |
    <a href="https://arad1367.pythonanywhere.com/" target="_blank">Live demo of my PhD defense</a> |
    <a href="https://huggingface.co/stabilityai/stable-diffusion-3.5-large" target="_blank">stable-diffusion-3.5-large model</a> |
    <a href="https://huggingface.co/spaces/stabilityai/stable-diffusion-3.5-large-turbo" target="_blank">stable-diffusion-3.5-large-turbo</a> |
    <a href="https://stability.ai/license" target="_blank">Stability.ai licence</a>
    <br>
    <p style="margin-top: 10px;">Made with π by Pejman Ebrahimi</p>
</div>
"""
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.HTML(
            '<h1 class="main-title">Stable Diffusion 3.5 Large (8B)</h1>'
            '<div style="text-align: center; margin-bottom: 20px;">'
            '<a href="https://stability.ai" target="_blank" style="color: #E0AAFF;">Visit Stability.ai</a>'
            '</div>'
        )

        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=1,
                placeholder="Enter your prompt",
                container=False,
            )
            run_button = gr.Button("Generate", scale=0, variant="primary")

        result = gr.Image(label="Result", show_label=False)
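
        # Optional controls tucked into an accordion; the negative prompt field exists
        # but is kept hidden (visible=False).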
        with gr.Accordion("Advanced Settings", open=False):
            negative_prompt = gr.Text(
                label="Negative prompt",
                max_lines=1,
                placeholder="Enter a negative prompt",
                visible=False,
            )

            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )

            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

            with gr.Row():
                width = gr.Slider(
                    label="Width",
                    minimum=512,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,
                )
                height = gr.Slider(
                    label="Height",
                    minimum=512,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,
                )

            with gr.Row():
                guidance_scale = gr.Slider(
                    label="Guidance scale",
                    minimum=0.0,
                    maximum=7.5,
                    step=0.1,
                    value=4.5,
                )
                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    minimum=1,
                    maximum=50,
                    step=1,
                    value=40,
                )
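
        # With cache_mode="lazy", each example is generated and cached the first time it is clicked.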
        gr.Examples(
            examples=examples,
            inputs=[prompt],
            outputs=[result, seed],
            fn=infer,
            cache_examples=True,
            cache_mode="lazy",
        )

        gr.HTML(footer)
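
    # Both the Generate button and pressing Enter in the prompt box trigger inference.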
    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer,
        inputs=[
            prompt,
            negative_prompt,
            seed,
            randomize_seed,
            width,
            height,
            guidance_scale,
            num_inference_steps,
        ],
        outputs=[result, seed],
    )

if __name__ == "__main__":
    demo.launch()