check for the existence of the default accelerate config that can create headaches (#561)
scripts/finetune.py   +9 -0   CHANGED
@@ -14,6 +14,7 @@ import transformers
 import yaml

 # add src to the pythonpath so we don't need to pip install this
+from accelerate.commands.config import config_args
 from art import text2art
 from transformers import GenerationConfig, TextStreamer

@@ -254,9 +255,17 @@ def load_datasets(
     )


+def check_accelerate_default_config():
+    if Path(config_args.default_yaml_config_file).exists():
+        LOG.warning(
+            f"accelerate config file found at {config_args.default_yaml_config_file}. This can lead to unexpected errors"
+        )
+
+
 def do_cli(config: Path = Path("examples/"), **kwargs):
     print_axolotl_text_art()
     parsed_cfg = load_cfg(config, **kwargs)
+    check_accelerate_default_config()
     parser = transformers.HfArgumentParser((TrainerCliArgs))
     parsed_cli_args, _ = parser.parse_args_into_dataclasses(
         return_remaining_strings=True
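Note: `config_args.default_yaml_config_file` is the path accelerate resolves for its user-level default config (typically `~/.cache/huggingface/accelerate/default_config.yaml`, though the exact location depends on the accelerate version and cache settings). Below is a minimal standalone sketch of the same check, followed by a hypothetical workaround that parks the file under a `.bak` name so it no longer overrides the launch settings axolotl expects; the rename step is illustrative and not part of this commit.

import logging
from pathlib import Path

from accelerate.commands.config import config_args

LOG = logging.getLogger(__name__)


def check_accelerate_default_config():
    # Same check the commit adds to scripts/finetune.py: warn if a
    # user-level accelerate config exists, since it can silently change
    # how training is launched.
    if Path(config_args.default_yaml_config_file).exists():
        LOG.warning(
            f"accelerate config file found at {config_args.default_yaml_config_file}. "
            "This can lead to unexpected errors"
        )


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    check_accelerate_default_config()
    default_cfg = Path(config_args.default_yaml_config_file)
    if default_cfg.exists():
        # Hypothetical remedy (an assumption, not part of #561): move the
        # file aside so `accelerate launch` falls back to its built-in
        # defaults on the next run.
        default_cfg.rename(default_cfg.with_name(default_cfg.name + ".bak"))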