rahul7star committed on
Commit
81d6159
·
verified ·
1 Parent(s): 286537c

Update app_quant_latent.py

Browse files
Files changed (1) hide show
  1. app_quant_latent.py +78 -55
app_quant_latent.py CHANGED
@@ -246,66 +246,89 @@ log_system_stats("AFTER PIPELINE BUILD")
246
 
247
 
248
 
 
249
  @spaces.GPU
250
  def generate_image(prompt, height, width, steps, seed, guidance_scale=0.0, return_latents=False):
 
 
 
 
 
 
 
 
 
 
 
 
251
 
252
- try:
253
- generator = torch.Generator(device).manual_seed(int(seed))
254
-
255
- # Try advanced latent preparation
256
  try:
257
- batch_size = 1
258
- num_channels_latents = getattr(pipe.unet, "in_channels", None)
259
- if num_channels_latents is None:
260
- raise AttributeError("pipe.unet.in_channels not found, fallback to standard pipeline")
261
-
262
- latents = pipe.prepare_latents(
263
- batch_size=batch_size,
264
- num_channels=num_channels_latents,
265
- height=height,
266
- width=width,
267
- dtype=torch.float32,
268
- device=device,
269
- generator=generator
270
- )
271
- log(f"βœ… Latents prepared: {latents.shape}")
272
-
273
- # Generate image using prepared latents
274
- output = pipe(
275
- prompt=prompt,
276
- height=height,
277
- width=width,
278
- num_inference_steps=steps,
279
- guidance_scale=guidance_scale,
280
- generator=generator,
281
- latents=latents
282
- )
283
-
284
- except Exception as e_inner:
285
- # If advanced method fails, fallback to standard pipeline
286
- log(f"⚠️ Advanced latent method failed: {e_inner}")
287
- log("πŸ” Falling back to standard pipeline...")
288
- output = pipe(
289
- prompt=prompt,
290
- height=height,
291
- width=width,
292
- num_inference_steps=steps,
293
- guidance_scale=guidance_scale,
294
- generator=generator
295
- )
296
-
297
- image = output.images[0]
298
- log("βœ… Inference finished successfully.")
299
-
300
- if return_latents and 'latents' in locals():
301
- return image, latents, LOGS
302
- else:
303
- return image, LOGS
304
-
305
- except Exception as e:
306
- log(f"❌ Inference failed entirely: {e}")
307
- return None, LOGS
 
 
 
 
 
 
 
 
 
 
 
308
 
 
 
 
309
 
310
 
311
  # ============================================================
 
246
 
247
 
248
 
249
+
250
@spaces.GPU
def generate_image(prompt, height, width, steps, seed, guidance_scale=0.0, return_latents=False):
    """
    Robust dual-pipeline image generation:
    - Tries the advanced latent method first
    - Falls back to the standard pipeline if the latent path fails
    - Returns an image when either path succeeds; image is None if both fail
    - Optionally returns latents (only when they were actually used)
    - Logs progress and errors into a per-call LOGS list
    """
    LOGS = []
    image = None
    latents = None

    try:
        generator = torch.Generator(device).manual_seed(int(seed))

        # -------------------------------
        # Attempt advanced latent generation
        # -------------------------------
        try:
            num_channels_latents = getattr(pipe.unet, "in_channels", None)
            if num_channels_latents is None:
                raise AttributeError("pipe.unet.in_channels not found, fallback to standard pipeline")

            latents = pipe.prepare_latents(
                batch_size=1,
                num_channels=num_channels_latents,
                height=height,
                width=width,
                dtype=torch.float32,
                device=device,
                generator=generator,
            )
            LOGS.append(f"✅ Latents prepared: {latents.shape}")

            output = pipe(
                prompt=prompt,
                height=height,
                width=width,
                num_inference_steps=steps,
                guidance_scale=guidance_scale,
                generator=generator,
                latents=latents,
            )
            image = output.images[0]
            LOGS.append("✅ Advanced latent generation succeeded.")

        # -------------------------------
        # Fallback to standard pipeline
        # -------------------------------
        except Exception as e_latent:
            LOGS.append(f"⚠️ Advanced latent generation failed: {e_latent}")
            LOGS.append("🔁 Falling back to standard pipeline...")
            # Latents prepared above (if any) were not used for this image;
            # clear them so the caller never receives stale latents.
            latents = None
            try:
                output = pipe(
                    prompt=prompt,
                    height=height,
                    width=width,
                    num_inference_steps=steps,
                    guidance_scale=guidance_scale,
                    generator=generator,
                )
                image = output.images[0]
                LOGS.append("✅ Standard pipeline generation succeeded.")
            except Exception as e_standard:
                LOGS.append(f"❌ Standard pipeline generation failed: {e_standard}")
                # If everything fails, image remains None.

        # -------------------------------
        # Return results (consistent arity with the success path)
        # -------------------------------
        if return_latents and latents is not None:
            return image, latents, LOGS
        return image, LOGS

    except Exception as e:
        LOGS.append(f"❌ Inference failed entirely: {e}")
        # Honor return_latents so callers unpacking 3 values don't crash.
        if return_latents:
            return None, None, LOGS
        return None, LOGS
332
 
333
 
334
  # ============================================================