Update app.py
app.py
CHANGED
@@ -378,7 +378,7 @@ class GuiSD:
         face_restoration_visibility,
         face_restoration_weight,
     ):
-        info_state = html_template_message("
+        info_state = html_template_message("Processing...")
         yield info_state, gr.update(), gr.update()
 
         vae_model = vae_model if vae_model != "None" else None
@@ -562,42 +562,40 @@
 
         actual_progress = 0
         info_images = gr.update()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        if save_generated_images:
-            info_images += f"<br>{download_links}"
+        img, [seed, image_path, metadata] = self.model(**pipe_params)
+
+        info_images = f"Seeds: {str(seed)}"
+        if vae_msg:
+            info_images = info_images + "<br>" + vae_msg
+
+        if "Cannot copy out of meta tensor; no data!" in self.model.last_lora_error:
+            msg_ram = "Unable to process the LoRAs due to high RAM usage; please try again later."
+            print(msg_ram)
+            msg_lora += f"<br>{msg_ram}"
+
+        for status, lora in zip(self.model.lora_status, self.model.lora_memory):
+            if status:
+                msg_lora += f"<br>Loaded: {lora}"
+            elif status is not None:
+                msg_lora += f"<br>Error with: {lora}"
+
+        if msg_lora:
+            info_images += msg_lora
+
+        info_images = info_images + "<br>" + "GENERATION DATA:<br>" + escape_html(metadata[-1]) + "<br>-------<br>"
+
+        download_links = "<br>".join(
+            [
+                f'<a href="{path.replace("/images/", "/file=/home/user/app/images/")}" download="{os.path.basename(path)}">Download Image {i + 1}</a>'
+                for i, path in enumerate(image_path)
+            ]
+        )
+        if save_generated_images:
+            info_images += f"<br>{download_links}"
 
-
+        info_state = "COMPLETE"
 
-
+        return info_state, img, info_images
 
 
 def dynamic_gpu_duration(func, duration, *args):
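The net effect of the second hunk is that the method now makes a single call to self.model(**pipe_params), assembles the seed/LoRA/metadata report and download links, and finishes with return info_state, img, info_images rather than streaming further updates; the early yield info_state, gr.update(), gr.update() from the first hunk is kept. Below is a minimal, self-contained sketch of that yield-once-then-return pattern in plain Python. The generate function and its placeholder values are hypothetical stand-ins, not the Space's actual code, and how the hosting UI framework surfaces the returned value is up to that framework.

```python
# Hypothetical stand-in for the pattern used in this commit:
# one early status yield, then a final return with the results.
def generate():
    yield "Processing...", None, None    # early UI status (first hunk)
    # Stand-in for: img, [seed, image_path, metadata] = self.model(**pipe_params)
    img, seed = ["img_0.png"], 12345
    info_images = f"Seeds: {seed}"
    return "COMPLETE", img, info_images  # final results (second hunk)

gen = generate()
print(next(gen))              # ('Processing...', None, None)
try:
    next(gen)
except StopIteration as stop:
    # In plain Python, a generator's return value is carried on StopIteration.value.
    print(stop.value)         # ('COMPLETE', ['img_0.png'], 'Seeds: 12345')
```

Compared with yielding an update per generation step, this trades live progress reporting for a single final update once generation completes.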