Update app.py
app.py CHANGED
@@ -16,6 +16,7 @@ import transformers
 from transformers import AutoTokenizer, T5EncoderModel
 from translatepy import Translator
 
+
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 translator = Translator()
 HF_TOKEN = os.environ.get("HF_TOKEN", None)
@@ -70,13 +71,9 @@ tokenizer_3 = AutoTokenizer.from_pretrained(
 if torch.cuda.is_available():
     pipe = StableDiffusion3Pipeline.from_pretrained(
         repo,
-        tokenizer_3=tokenizer_3,
-        text_encoder_3=text_encoder_3,
         torch_dtype=torch.float16).to("cuda")
     pipe2 = StableDiffusion3Img2ImgPipeline.from_pretrained(
         repo,
-        tokenizer_3=tokenizer_3,
-        text_encoder_3=text_encoder_3,
         torch_dtype=torch.float16).to("cuda")
 
 torch.set_float32_matmul_precision("high")
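For context, a minimal sketch of the loading path this diff leaves in place. It is not the Space's full app.py: the `repo` id below is a placeholder for whatever SD3 checkpoint the Space actually uses, and the env var is set before the Hub-backed imports so it takes effect. With `tokenizer_3` and `text_encoder_3` no longer passed explicitly, `from_pretrained` falls back to the T5 tokenizer/encoder bundled with the checkpoint.

import os

# Set before importing diffusers/huggingface_hub; requires the hf_transfer package.
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

import torch
from diffusers import StableDiffusion3Img2ImgPipeline, StableDiffusion3Pipeline

# Placeholder checkpoint id; the Space defines its own `repo`.
repo = "stabilityai/stable-diffusion-3-medium-diffusers"

if torch.cuda.is_available():
    # Text-to-image and image-to-image pipelines built from the same checkpoint;
    # the checkpoint's own text_encoder_3/tokenizer_3 are loaded automatically.
    pipe = StableDiffusion3Pipeline.from_pretrained(
        repo, torch_dtype=torch.float16).to("cuda")
    pipe2 = StableDiffusion3Img2ImgPipeline.from_pretrained(
        repo, torch_dtype=torch.float16).to("cuda")

# Allow TF32 matmuls for faster float32 math on supported GPUs.
torch.set_float32_matmul_precision("high")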