Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -14,8 +14,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 
 # Load the model pipeline
 pipe = DiffusionPipeline.from_pretrained("black-forest-labs/Flux.1-Dev", torch_dtype=dtype).to(device)
-pipe.transformer.
-pipe.transformer.set_attn_processor(FlashFusedFluxAttnProcessor3_0())
+pipe.transformer.set_attn_processor(FlashFluxAttnProcessor3_0())
 
 @spaces.GPU(duration=1200)
 def push_to_hub(repo_id, filename, oauth_token: gr.OAuthToken):
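For context, here is a minimal sketch of how the updated section of app.py could fit together after this commit. The import path for FlashFluxAttnProcessor3_0 and the dtype value are assumptions for illustration only; the diff does not show the imports or the dtype definition.

# Minimal sketch of the updated app.py section (imports and dtype are assumed).
import torch
import spaces
import gradio as gr
from diffusers import DiffusionPipeline

# Hypothetical import location for the custom attention processor (not shown in the diff).
from flash_attn_processor import FlashFluxAttnProcessor3_0

dtype = torch.bfloat16  # assumed; the diff only shows torch_dtype=dtype
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the model pipeline
pipe = DiffusionPipeline.from_pretrained("black-forest-labs/Flux.1-Dev", torch_dtype=dtype).to(device)
# Swap the Flux transformer's attention processors for the flash-attention variant
pipe.transformer.set_attn_processor(FlashFluxAttnProcessor3_0())

@spaces.GPU(duration=1200)
def push_to_hub(repo_id, filename, oauth_token: gr.OAuthToken):
    ...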