Commit 673a573
Parent(s): 42c2b6e
add all scripts
README.md CHANGED
````diff
@@ -79,33 +79,40 @@ $ pip install diffusers transformers accelerate
 3. Run code:
 
 ```python
-#!/usr/bin/env python3
 import torch
 from diffusers.utils import load_image
+from PIL import Image
+import numpy as np
 import cv2
 
 from diffusers import (
     ControlNetModel,
     StableDiffusionControlNetPipeline,
     UniPCMultistepScheduler,
 )
-import sys
 
-
+checkpoint = "ControlNet-1-1-preview/control_v11p_sd15_canny"
 
 image = load_image(
-    "https://huggingface.co/
+    "https://huggingface.co/ControlNet-1-1-preview/control_v11p_sd15_canny/resolve/main/images/input.png"
 )
 
+image = np.array(image)
+
 low_threshold = 100
 high_threshold = 200
 
 image = cv2.Canny(image, low_threshold, high_threshold)
 image = image[:, :, None]
 image = np.concatenate([image, image, image], axis=2)
-
+control_image = Image.fromarray(image)
+
+control_image.save("./images/control.png")
 
-controlnet = ControlNetModel.from_pretrained(
+controlnet = ControlNetModel.from_pretrained(checkpoint, torch_dtype=torch.float16)
 pipe = StableDiffusionControlNetPipeline.from_pretrained(
     "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
 )
@@ -114,16 +121,16 @@ pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
 pipe.enable_model_cpu_offload()
 
 generator = torch.manual_seed(33)
-image = pipe("a blue paradise bird in the jungle", num_inference_steps=20, generator=generator, image=
+image = pipe("a blue paradise bird in the jungle", num_inference_steps=20, generator=generator, image=control_image).images[0]
 
-image.save('images/
+image.save('images/image_out.png')
 ```
 
-
+
 
+
 
+
 
 ## Other released checkpoints v1-1
 
````
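For reference, the updated example in this commit assembles into roughly the script below. This is a sketch pieced together from the hunks above, not the literal file: the `numpy` and `PIL.Image` imports are assumed (the diff does not show the full import block), and the `images/` output paths are taken verbatim from the diff, so that directory is expected to exist.

```python
# Sketch of the updated README example, assembled from the diff above.
# Assumed: numpy/PIL imports and an existing ./images/ directory.
import cv2
import numpy as np
import torch
from PIL import Image

from diffusers import (
    ControlNetModel,
    StableDiffusionControlNetPipeline,
    UniPCMultistepScheduler,
)
from diffusers.utils import load_image

checkpoint = "ControlNet-1-1-preview/control_v11p_sd15_canny"

# Download the conditioning photo and turn it into a Canny edge map.
image = load_image(
    "https://huggingface.co/ControlNet-1-1-preview/control_v11p_sd15_canny/resolve/main/images/input.png"
)
image = np.array(image)

low_threshold = 100
high_threshold = 200

image = cv2.Canny(image, low_threshold, high_threshold)
image = image[:, :, None]
image = np.concatenate([image, image, image], axis=2)  # 1-channel edges -> 3-channel control image
control_image = Image.fromarray(image)
control_image.save("./images/control.png")

# Plug the Canny ControlNet into a Stable Diffusion v1.5 pipeline.
controlnet = ControlNetModel.from_pretrained(checkpoint, torch_dtype=torch.float16)
pipe = StableDiffusionControlNetPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
)

pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
pipe.enable_model_cpu_offload()

# Fixed seed so the generation is reproducible.
generator = torch.manual_seed(33)
image = pipe(
    "a blue paradise bird in the jungle",
    num_inference_steps=20,
    generator=generator,
    image=control_image,
).images[0]

image.save("images/image_out.png")
```

The edge map is what steers the generation: the pipeline conditions Stable Diffusion v1.5 on `control_image` while the prompt describes the content, and the result is written next to the saved control image.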