rahul7star committed
Commit 69e894e · verified · 1 Parent(s): d419dc7

Update app_quant_latent.py

Files changed (1):
  1. app_quant_latent.py +37 -31
app_quant_latent.py CHANGED
```diff
@@ -553,42 +553,46 @@ def safe_get_latents(pipe, height, width, generator, device, LOGS):
 @spaces.GPU
 def generate_image(prompt, height, width, steps, seed, guidance_scale=0.0):
     LOGS = []
-    latents = None
-    image = None
-    gallery = []
+    device = "cuda"
+    generator = torch.Generator(device).manual_seed(int(seed))
 
-    # placeholder image if all fails
+    # placeholders
     placeholder = Image.new("RGB", (width, height), color=(255, 255, 255))
-    print(prompt)
+    latent_gallery = []
+    final_gallery = []
 
     try:
-        generator = torch.Generator(device).manual_seed(int(seed))
-
-        # -------------------------------
-        # Try advanced latent extraction
-        # -------------------------------
+        # --- Try advanced latent mode ---
         try:
             latents = safe_get_latents(pipe, height, width, generator, device, LOGS)
 
-            output = pipe(
-                prompt=prompt,
-                height=height,
-                width=width,
-                num_inference_steps=steps,
-                guidance_scale=guidance_scale,
-                generator=generator,
-                latents=latents
-            )
+            for i, t in enumerate(pipe.scheduler.timesteps):
+                # Step-wise denoising
+                with torch.no_grad():
+                    noise_pred = pipe.unet(latents, t, encoder_hidden_states=pipe.get_text_embeddings(prompt))["sample"]
+                    latents = pipe.scheduler.step(noise_pred, t, latents)["prev_sample"]
 
-            image = output.images[0]
-            gallery = [image]
-            LOGS.extend(getattr(pipe, "_latents_log", []))
+                # Convert latent to preview image
+                try:
+                    latent_img = latent_to_image(latents, pipe.vae)[0]
+                except Exception:
+                    latent_img = placeholder
+                latent_gallery.append(latent_img)
+
+                # Yield intermediate update: latents updated, final gallery empty
+                yield None, latent_gallery, final_gallery, LOGS
+
+            # decode final image
+            final_img = pipe.decode_latents(latents)[0]
+            final_gallery.append(final_img)
             LOGS.append("βœ… Advanced latent pipeline succeeded.")
+            yield final_img, latent_gallery, final_gallery, LOGS
 
         except Exception as e:
-            LOGS.append(f"⚠️ Latent mode failed: {e}")
+            LOGS.append(f"⚠️ Advanced latent mode failed: {e}")
             LOGS.append("πŸ” Switching to standard pipeline...")
 
+            # Standard pipeline fallback
             try:
                 output = pipe(
                     prompt=prompt,
@@ -598,21 +602,23 @@ def generate_image(prompt, height, width, steps, seed, guidance_scale=0.0):
                     guidance_scale=guidance_scale,
                     generator=generator,
                 )
-                image = output.images[0]
-                gallery = [image]
+                final_img = output.images[0]
+                final_gallery.append(final_img)
+                latent_gallery.append(final_img)  # optionally show in latent gallery as last step
                 LOGS.append("βœ… Standard pipeline succeeded.")
+                yield final_img, latent_gallery, final_gallery, LOGS
 
             except Exception as e2:
                 LOGS.append(f"❌ Standard pipeline failed: {e2}")
-                image = placeholder
-                gallery = [image]
-
-        return image, gallery, LOGS
+                final_gallery.append(placeholder)
+                latent_gallery.append(placeholder)
+                yield placeholder, latent_gallery, final_gallery, LOGS
 
     except Exception as e:
         LOGS.append(f"❌ Total failure: {e}")
-        return placeholder, [placeholder], LOGS
-
+        final_gallery.append(placeholder)
+        latent_gallery.append(placeholder)
+        yield placeholder, latent_gallery, final_gallery, LOGS
 @spaces.GPU
 def generate_image_backup(prompt, height, width, steps, seed, guidance_scale=0.0, return_latents=False):
     """
```