Update main.py
main.py CHANGED
@@ -18,8 +18,14 @@ device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 # Load HF token from environment variable
 HF_TOKEN = os.getenv("HF_TOKEN")
 
+# Dictionary to store loaded pipelines
+loaded_pipelines = {}
+
 # Function to load pipeline dynamically
 def load_pipeline(model_name: str):
+    if model_name in loaded_pipelines:
+        return loaded_pipelines[model_name]
+
     if model_name == "Fluently XL Final":
         pipe = StableDiffusionXLPipeline.from_single_file(
             hf_hub_download(repo_id="fluently/Fluently-XL-Final", filename="FluentlyXL-Final.safetensors", token=HF_TOKEN),
@@ -65,6 +71,7 @@ def load_pipeline(model_name: str):
         raise ValueError(f"Unknown model: {model_name}")
 
     pipe.to(device)
+    loaded_pipelines[model_name] = pipe
     return pipe
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
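The commit memoizes load_pipeline: pipelines are kept in a module-level dictionary keyed by model name, so selecting a previously used model reuses the already-constructed pipeline instead of downloading and rebuilding it. Below is a minimal, self-contained sketch of the same caching pattern; build_pipeline and the final assert are illustrative stand-ins, not code from this Space.

import torch

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Cache of already-constructed pipelines, keyed by model name.
loaded_pipelines = {}

def build_pipeline(model_name: str):
    # Stand-in for the per-model branches in main.py (assumed helper, illustration only).
    return torch.nn.Identity()

def load_pipeline(model_name: str):
    # Return the cached object if this model was loaded before.
    if model_name in loaded_pipelines:
        return loaded_pipelines[model_name]

    # Otherwise build it once, move it to the target device, and cache it.
    pipe = build_pipeline(model_name)
    pipe.to(device)
    loaded_pipelines[model_name] = pipe
    return pipe

# Second call with the same name returns the same object instead of rebuilding it.
assert load_pipeline("Fluently XL Final") is load_pipeline("Fluently XL Final")

One trade-off of this pattern: every pipeline that has been requested stays resident on the device until the process exits, so it spends memory to avoid repeated load time.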