Commit 69c288d · update CPU offload
Parent: 04452e3

Files changed:
- app.py (+2, -2)
- pipelines/pipeline_flux_infusenet.py (+13, -0)
- pipelines/pipeline_infu_flux.py (+5, -2)
app.py CHANGED

@@ -105,7 +105,7 @@ def prepare_pipeline(model_version, enable_realism, enable_anti_blur):
     return pipeline
 
 
-@spaces.GPU
+@spaces.GPU(duration=120)
 def generate_image(
     input_image,
     control_image,
@@ -238,7 +238,7 @@ with gr.Blocks() as demo:
         inputs=[ui_id_image, ui_control_image, ui_prompt_text, ui_seed, ui_enable_realism, ui_enable_anti_blur, ui_model_version],
         outputs=[image_output],
         fn=generate_examples,
-        cache_examples=True
+        cache_examples=True
     )
 
     ui_btn_generate.click(
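The decorator change is the ZeroGPU-facing half of the commit: on a Space that runs on ZeroGPU, `spaces.GPU` attaches a GPU to the process only for the duration of the decorated call, and the `duration` argument raises the per-call time budget to roughly 120 seconds so longer FLUX generations are less likely to be cut off. A minimal sketch of the usage pattern, assuming an already-prepared diffusers pipeline is passed in (the `run_flux` helper and its body are illustrative, not taken from app.py):

import spaces
import torch

@spaces.GPU(duration=120)  # request up to ~120 s of GPU time per call
def run_flux(pipeline, prompt: str, seed: int = 0):
    # The pipeline is built outside the decorated function; ZeroGPU only
    # attaches the GPU while this call is executing.
    generator = torch.Generator(device="cuda").manual_seed(seed)
    return pipeline(prompt, generator=generator).images[0]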
pipelines/pipeline_flux_infusenet.py CHANGED

@@ -261,6 +261,9 @@ class FluxInfuseNetPipeline(FluxControlNetPipeline):
                 images.
         """
 
+        # CPU offload controlnet
+        self.controlnet.cpu()
+
         height = height or self.default_sample_size * self.vae_scale_factor
         width = width or self.default_sample_size * self.vae_scale_factor
 
@@ -487,6 +490,11 @@ class FluxInfuseNetPipeline(FluxControlNetPipeline):
         ]
         controlnet_keep.append(keeps[0] if isinstance(self.controlnet, FluxControlNetModel) else keeps)
 
+        # CPU offload T5, move back controlnet to GPU
+        self.text_encoder_2.cpu()
+        torch.cuda.empty_cache()
+        self.controlnet.to(device)
+
         # 7. Denoising loop
         with self.progress_bar(total=num_inference_steps) as progress_bar:
             for i, t in enumerate(timesteps):
@@ -591,6 +599,11 @@ class FluxInfuseNetPipeline(FluxControlNetPipeline):
 
                 if XLA_AVAILABLE:
                     xm.mark_step()
+
+        # CPU offload controlnet, move back T5 to GPU
+        self.controlnet.cpu()
+        torch.cuda.empty_cache()
+        self.text_encoder_2.to(device)
 
         if output_type == "latent":
             image = latents
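Taken together, the three hunks ping-pong the two largest auxiliary modules: the InfuseNet controlnet stays on the CPU while `text_encoder_2` (the T5 encoder) embeds the prompt, the two swap places just before the denoising loop, and they swap back once sampling finishes so the pipeline is left in the state the next call expects. A generic sketch of that swap, assuming both objects are ordinary `torch.nn.Module`s (the helper name is illustrative, not part of the repository):

import torch

def swap_resident_module(move_in: torch.nn.Module,
                         move_out: torch.nn.Module,
                         device: str = "cuda") -> None:
    # Keep only one of the two large modules on the GPU at any time.
    move_out.cpu()                # evict the module that is no longer needed
    torch.cuda.empty_cache()      # release cached blocks so the next .to() fits
    move_in.to(device)            # bring in the module about to be used

# before the denoising loop: swap_resident_module(self.controlnet, self.text_encoder_2)
# after the loop finishes:   swap_resident_module(self.text_encoder_2, self.controlnet)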
pipelines/pipeline_infu_flux.py CHANGED

@@ -167,8 +167,11 @@ class InfUFluxPipeline:
                   'After that, run the code again. If you have downloaded it, please use `base_model_path` to specify the correct path.')
             print('\nIf you are using other models, please download them to a local directory and use `base_model_path` to specify the correct path.')
             exit()
-
-
+        pipe.to('cuda', torch.bfloat16)
+        # CPU offload controlnet in advance
+        pipe.controlnet.cpu()
+        torch.cuda.empty_cache()
+        # pipe.enable_model_cpu_offload()
         self.pipe = pipe
 
         # Load image proj model
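The line left commented out, `pipe.enable_model_cpu_offload()`, is the built-in diffusers alternative to this manual placement: with `accelerate` installed it moves each registered sub-model to the GPU only for its own forward pass. The commit instead keeps the pipeline on CUDA in bfloat16 and hand-offloads just the controlnet (and, inside the pipeline, T5), which keeps the FLUX transformer resident for the entire denoising loop. For comparison, a sketch of the built-in options on a FluxControlNetPipeline-style object (`model_path` and `controlnet` are placeholders, not values from this repository):

import torch
from diffusers import FluxControlNetPipeline

pipe = FluxControlNetPipeline.from_pretrained(
    model_path, controlnet=controlnet, torch_dtype=torch.bfloat16
)
pipe.enable_model_cpu_offload()          # each sub-model visits the GPU only when used
# pipe.enable_sequential_cpu_offload()   # lower VRAM still, at a larger speed cost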