Update app.py
app.py CHANGED
@@ -70,7 +70,7 @@ if(is_spaces):
 def create_dataset(*inputs):
     print("Creating dataset")
     images = inputs[0]
-    destination_folder = str(uuid.uuid4())
+    destination_folder = str(f"datasets/{uuid.uuid4()}")
     if not os.path.exists(destination_folder):
         os.makedirs(destination_folder)
 
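The hunk above groups every upload under a `datasets/` prefix instead of a bare UUID folder. For readers following along, here is a minimal, self-contained sketch of how that destination path might be used when copying the uploaded files over; the `image_paths` argument and the `shutil.copy` loop are illustrative assumptions, not part of this commit:

```python
import os
import shutil
import uuid

def create_dataset_sketch(image_paths):
    # Mirror the change above: one datasets/<uuid> folder per run.
    destination_folder = str(f"datasets/{uuid.uuid4()}")
    if not os.path.exists(destination_folder):
        os.makedirs(destination_folder)

    # Assumption: image_paths is a list of local file paths coming from the Gradio gallery.
    for path in image_paths:
        shutil.copy(path, destination_folder)

    return destination_folder
```

Keeping the per-run UUID folders under `datasets/` keeps them out of the Space's working-directory root.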
@@ -140,7 +140,7 @@ def start_training(
     slugged_lora_name = slugify(lora_name)
 
     # Load the default config
-    with open("
+    with open("train_lora_flux_24gb.yaml", "r") as f:
         config = yaml.safe_load(f)
 
     # Update the config with user inputs
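This hunk points the default-config load at the bundled `train_lora_flux_24gb.yaml` before the user's settings are applied. A hedged sketch of that load-then-override flow; the exact keys (`config.process[0].train.steps` and friends) follow the layout of ai-toolkit's example configs but should be treated as assumptions:

```python
import yaml
from slugify import slugify

def load_and_update_config_sketch(lora_name, steps, lr, rank):
    slugged_lora_name = slugify(lora_name)

    # Load the default ai-toolkit config shipped alongside app.py.
    with open("train_lora_flux_24gb.yaml", "r") as f:
        config = yaml.safe_load(f)

    # Assumption: the YAML exposes a config.process list as in ai-toolkit's
    # example configs; the keys below are illustrative only.
    process = config["config"]["process"][0]
    process["train"]["steps"] = steps
    process["train"]["lr"] = lr
    process["network"]["linear"] = rank

    # Write the per-LoRA config that the training step will consume.
    config_path = f"config/{slugged_lora_name}.yaml"
    with open(config_path, "w") as f:
        yaml.dump(config, f)
    return config_path
```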
@@ -170,29 +170,17 @@ def start_training(
     config_path = f"config/{slugged_lora_name}.yaml"
     with open(config_path, "w") as f:
         yaml.dump(config, f)
-
-
-
-
-
-
+    if(is_spaces):
+        pass
+        #do the spacerunner things here
+    else:
+        #run the job locally
+        job = get_job(config_path)
+        job.run()
+        job.cleanup()
 
     return f"Training completed successfully. Model saved as {slugged_lora_name}"
 
-def start_training_spaces(
-    lora_name,
-    concept_sentence,
-    steps,
-    lr,
-    rank,
-    dataset_folder,
-    sample_1,
-    sample_2,
-    sample_3,
-):
-    #Feel free to include the spacerunner stuff here @abhishek
-    pass
-
 theme = gr.themes.Monochrome(
     text_size=gr.themes.Size(lg="18px", md="15px", sm="13px", xl="22px", xs="12px", xxl="24px", xxs="9px"),
     font=[gr.themes.GoogleFont('Source Sans Pro'), 'ui-sans-serif', 'system-ui', 'sans-serif'],
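The new `else:` branch above drives ai-toolkit directly from the freshly written config, replacing the removed `start_training_spaces` stub. A standalone sketch of that local path, assuming ai-toolkit is cloned next to app.py and importable, and that `get_job` lives in its `toolkit.job` module as in ai-toolkit's own `run.py`:

```python
# Assumption: the ai-toolkit checkout is on sys.path so toolkit.job resolves.
from toolkit.job import get_job

def run_job_locally(config_path):
    # Build the job from the YAML config written by start_training, then run it
    # and release resources when done, matching the calls in the hunk above.
    job = get_job(config_path)
    job.run()
    job.cleanup()
```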
@@ -296,8 +284,9 @@ with gr.Blocks(theme=theme, css=css) as demo:
 ```bash
 git clone https://huggingface.co/spaces/flux-train/flux-lora-trainer
 cd flux-lora-trainer
+pip install requirements_local.txt
 ```
-
+
 Then you can install ai-toolkit
 ```bash
 git clone https://github.com/ostris/ai-toolkit.git
@@ -311,12 +300,17 @@ with gr.Blocks(theme=theme, css=css) as demo:
 pip3 install -r requirements.txt
 cd ..
 ```
+
+Login with Hugging Face to access FLUX.1 [dev], choose a token with `write` permissions to push your LoRAs to the HF Hub
+```bash
+huggingface-cli login
+```
 
 Now you can run FLUX LoRA Ease locally by doing a simple
 ```py
 python app.py
 ```
-If you prefer command line, you can run Ostris' [AI Toolkit](https://github.com/ostris/ai-toolkit) yourself.
+If you prefer command line, you can run Ostris' [AI Toolkit](https://github.com/ostris/ai-toolkit) yourself directly.
 ''')
 
 dataset_folder = gr.State()
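If you would rather authenticate from Python than run the `huggingface-cli login` command added above, `huggingface_hub` provides an equivalent `login()` helper; a minimal sketch:

```python
from huggingface_hub import login

# Prompts for a Hugging Face token interactively; pick a token with `write`
# permissions so trained LoRAs can be pushed to the Hub, as the instructions note.
login()
```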