Update app.py
app.py CHANGED

@@ -20,9 +20,9 @@ pipe = FluxWithCFGPipeline.from_pretrained(
     "black-forest-labs/FLUX.1-schnell", torch_dtype=dtype
 )
 pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype)
-pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
-pipe.set_adapters("fast")
-pipe.fuse_lora(adapter_names=["fast"], lora_scale=1.0)
+# pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
+# pipe.set_adapters("fast")
+# pipe.fuse_lora(adapter_names=["fast"], lora_scale=1.0)
 pipe.to("cuda")
 # pipe.transformer.to(memory_format=torch.channels_last)
 # pipe.transformer = torch.compile(
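For context, a minimal sketch of the pipeline setup that this hunk edits, with the OpenFLUX fast-LoRA lines left commented out as in the new version. The imports, the `dtype` value, and the module providing `FluxWithCFGPipeline` are assumptions (the hunk does not show them); only the lines visible in the diff are taken from the commit.

# Sketch of the surrounding app.py setup implied by this hunk (assumptions noted).
import torch
from diffusers import AutoencoderTiny
from flux_pipeline import FluxWithCFGPipeline  # hypothetical local module for the Space's custom pipeline

dtype = torch.bfloat16  # assumed; the hunk only shows `dtype` being passed through

pipe = FluxWithCFGPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=dtype
)
# Replace the full VAE with the tiny autoencoder for faster decoding.
pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype)

# Disabled by this commit; re-enabling the fast LoRA would restore these calls:
# pipe.load_lora_weights(
#     "ostris/OpenFLUX.1",
#     weight_name="openflux1-v0.1.0-fast-lora.safetensors",
#     adapter_name="fast",
# )
# pipe.set_adapters("fast")
# pipe.fuse_lora(adapter_names=["fast"], lora_scale=1.0)

pipe.to("cuda")
# pipe.transformer.to(memory_format=torch.channels_last)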