Upload folder using huggingface_hub
- trans_cli_demo.py +1 -1
- trans_web_demo.py +1 -0
trans_cli_demo.py
CHANGED
@@ -18,7 +18,7 @@ from threading import Thread
 from transformers import AutoTokenizer, StoppingCriteria, StoppingCriteriaList, TextIteratorStreamer, AutoModel
 
 MODEL_PATH = os.environ.get('MODEL_PATH', 'THUDM/glm-4-9b-chat')
-
+MODEL_PATH = "/Users/zmac/Documents/opensrc/llms/GLM-4/models"
 print("MODEL_PATH: " + MODEL_PATH)
 
 ## If use peft model.
trans_web_demo.py
CHANGED
@@ -29,6 +29,7 @@ TokenizerType = Union[PreTrainedTokenizer, PreTrainedTokenizerFast]
 
 MODEL_PATH = os.environ.get('MODEL_PATH', 'THUDM/glm-4-9b-chat')
 #MODEL_PATH = "/Users/zmac/.cache/huggingface/hub/models--THUDM--glm-4-9b-chat/snapshots/04419001bc63e05e70991ade6da1f91c4aeec278"
+MODEL_PATH = "/Users/zmac/Documents/opensrc/llms/GLM-4/models"
 TOKENIZER_PATH = os.environ.get("TOKENIZER_PATH", MODEL_PATH)
 
 
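Note: both scripts already read MODEL_PATH from the environment before falling back to the hub repo ID, so a local checkout can be selected at launch time instead of being hardcoded as in this commit. A minimal sketch of that existing pattern, assuming the local directory used above exists on the host:

import os

# Prefer an explicit MODEL_PATH from the environment; otherwise fall back
# to the hub repo ID that the demos ship with.
MODEL_PATH = os.environ.get('MODEL_PATH', 'THUDM/glm-4-9b-chat')
print("MODEL_PATH: " + MODEL_PATH)

# Example launch with the local checkout referenced in this commit:
#   MODEL_PATH=/Users/zmac/Documents/opensrc/llms/GLM-4/models python trans_cli_demo.py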