Commit · cbb7feb
Parent(s): 95108a9
Update app.py
app.py CHANGED
@@ -67,7 +67,7 @@ pipe = StableDiffusionXLPipeline.from_pretrained(
 )
 pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
 pipe.to(device)
-
+pipe.load_lora_weights(lcm_lora_id, weight_name="lcm_sdxl_lora.safetensors", adapter_name="lcm_lora", use_auth_token=os.getenv('HF_TOKEN'))
 last_lora = ""
 last_merged = False
 last_fused = False
@@ -166,14 +166,11 @@ def run_lora(prompt, negative, lora_scale, selected_state, sdxl_loras, sdxl_lora
     loaded_state_dict = copy.deepcopy(state_dicts[repo_name]["state_dict"])
     cross_attention_kwargs = None
     if last_lora != repo_name:
-        if(last_fused):
-            pipe.unfuse_lora()
-        pipe.load_lora_weights(loaded_state_dict, adapter_name="
-        pipe.
-
-        pipe.fuse_lora()
-        pipe.unload_lora_weights()
-        last_fused = True
+        #if(last_fused):
+            #pipe.unfuse_lora()
+        pipe.load_lora_weights(loaded_state_dict, adapter_name=state_dicts[repo_name]["saved_name"])
+        pipe.set_adapters([state_dicts[repo_name]["saved_name"], "lcm_lora"], adapter_weights=[0.8, 1.0])
+        #last_fused = True
         is_pivotal = sdxl_loras[selected_state.index]["is_pivotal"]
         if(is_pivotal):
             #Add the textual inversion embeddings from pivotal tuning models
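For context, a minimal sketch of the pattern this commit moves to: keep the LCM LoRA loaded once as a named adapter and compose it with the selected style LoRA via the diffusers multi-adapter API, instead of fusing/unfusing weights on every request. This is not the Space's exact code; the checkpoint IDs ("latent-consistency/lcm-lora-sdxl", "nerijs/pixel-art-xl") and the sampling settings are illustrative stand-ins for the Space's lcm_lora_id and per-style state dicts.

# Sketch only: compose an LCM LoRA adapter with a style LoRA adapter (requires peft).
import torch
from diffusers import StableDiffusionXLPipeline, LCMScheduler

pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
)
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
pipe.to("cuda")

# Load the LCM LoRA once at startup under a fixed adapter name (mirrors the new line 70).
pipe.load_lora_weights("latent-consistency/lcm-lora-sdxl", adapter_name="lcm_lora")

# Per request: load the selected style LoRA under its own adapter name and activate
# both adapters with the weighting used in the diff (0.8 for the style, 1.0 for LCM).
pipe.load_lora_weights(
    "nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel"
)
pipe.set_adapters(["pixel", "lcm_lora"], adapter_weights=[0.8, 1.0])

# LCM-style sampling: few steps, low guidance.
image = pipe(
    "a cute corgi, pixel art",
    num_inference_steps=8,
    guidance_scale=1.5,
).images[0]
image.save("corgi.png")

Because adapters are only activated and re-weighted rather than fused into the UNet, switching between LoRAs stays cheap and the previous fuse/unfuse bookkeeping (last_fused, pipe.fuse_lora(), pipe.unfuse_lora()) can be commented out, as the diff above does.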