Commit 97c70d3 · Parent(s): 60f6b78
SD3 support
app.py CHANGED

@@ -19,7 +19,7 @@ MODEL_MAP = {
     "SD 3": "stabilityai/stable-diffusion-3-medium-diffusers",
 }
 RESOLUTION_MAP = { "SD 1.5": 512, "SD 2.1": 768, "SDXL": 1024, "SD 3": 1024 }
-SEED_MAP = { "SD 1.5": 850728, "SD 2.1": 944905, "SDXL": 450040818 }
+SEED_MAP = { "SD 1.5": 850728, "SD 2.1": 944905, "SDXL": 450040818, "SD 3": 282386105 }
 TAG_SCALE_MAP = {
     "SD 1.5": 1.15,  # default value
     "SD 2.1": 1.15,  # default value

@@ -47,7 +47,12 @@ def load_pipeline(model_name, progress):
     model_id = MODEL_MAP[model_name]
     pipeline_class = PIPELINE_MAP[model_name]
     progress(0, desc=f"Loading model: {model_id} with {pipeline_class.__name__}...")
-    pipe = pipeline_class.from_pretrained(model_id, torch_dtype=torch_dtype)
+    if model_name == "SD 3":
+        pipe = pipeline_class.from_pretrained(model_id, torch_dtype=torch_dtype,
+                                              text_encoder_3=None,
+                                              tokenizer_3=None,)
+    else:
+        pipe = pipeline_class.from_pretrained(model_id, torch_dtype=torch_dtype)
     pipe = pipe.to(device)
     current_model_id = model_id
     progress(1)

@@ -89,7 +94,7 @@ def infer(
         seed = random.randint(0, MAX_SEED)

     generator_custom = torch.Generator(device=device).manual_seed(int(seed))
-    generator_fixed
+    generator_fixed = torch.Generator(device=device).manual_seed(int(seed))

     unconditional_prompt = ""
pipelines/__pycache__/__init__.cpython-310.pyc CHANGED
Binary files a/pipelines/__pycache__/__init__.cpython-310.pyc and b/pipelines/__pycache__/__init__.cpython-310.pyc differ

pipelines/__pycache__/pipeline_tag_stablediffusion.cpython-310.pyc CHANGED
Binary files a/pipelines/__pycache__/pipeline_tag_stablediffusion.cpython-310.pyc and b/pipelines/__pycache__/pipeline_tag_stablediffusion.cpython-310.pyc differ

pipelines/__pycache__/pipeline_tag_stablediffusion3.cpython-310.pyc CHANGED
Binary files a/pipelines/__pycache__/pipeline_tag_stablediffusion3.cpython-310.pyc and b/pipelines/__pycache__/pipeline_tag_stablediffusion3.cpython-310.pyc differ

pipelines/__pycache__/pipeline_tag_stablediffusionXL.cpython-310.pyc CHANGED
Binary files a/pipelines/__pycache__/pipeline_tag_stablediffusionXL.cpython-310.pyc and b/pipelines/__pycache__/pipeline_tag_stablediffusionXL.cpython-310.pyc differ