various fixes post-release
- config.yml +4 -2
- tabbed.py +7 -4
config.yml
CHANGED
@@ -1,6 +1,8 @@
 ---
-
-
+hub:
+  repo_id: TheBloke/stable-vicuna-13B-GGML
+  filename: stable-vicuna-13B.ggml.q5_1.bin
+  # revision: v1.1.0
 llama_cpp:
   n_ctx: 2048
   n_gpu_layers: 40 # llama 13b has 40 layers
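The new hub: block is consumed generically on the Python side (see the tabbed.py hunk below): repo_id and filename are popped out, and any remaining key, such as the commented-out revision, is forwarded to hf_hub_download as a keyword argument. A minimal sketch of that flow, assuming huggingface_hub and PyYAML are installed:

import yaml
from huggingface_hub import hf_hub_download

# Load the Space's config and pull out the new hub: section.
with open("./config.yml", "r") as f:
    config = yaml.load(f, Loader=yaml.Loader)

hub_config = config["hub"]
repo_id = hub_config.pop("repo_id")      # TheBloke/stable-vicuna-13B-GGML
filename = hub_config.pop("filename")    # stable-vicuna-13B.ggml.q5_1.bin

# Whatever remains in hub_config (e.g. revision, if uncommented) is passed
# straight through as keyword arguments to hf_hub_download.
model_path = hf_hub_download(repo_id=repo_id, filename=filename, **hub_config)
print(model_path)  # local path to the downloaded GGML file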
tabbed.py
CHANGED
@@ -8,8 +8,11 @@ with open("./config.yml", "r") as f:
     config = yaml.load(f, Loader=yaml.Loader)
 while True:
     try:
+        hub_config = config["hub"]
+        repo_id = hub_config.pop("repo_id")
+        filename = hub_config.pop("filename")
         fp = hf_hub_download(
-            repo_id=
+            repo_id=repo_id, filename=filename, **hub_config
         )
         break
     except LocalEntryNotFoundError as e:
@@ -115,16 +118,16 @@ with gr.Blocks() as demo:
         with gr.Row():
             with gr.Column():
                 max_tokens = gr.Slider(20, 1000, label="Max Tokens", step=20, value=300)
-                temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=0.
+                temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=0.8)
                 top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)
-                top_k = gr.Slider(0, 100, label="Top
+                top_k = gr.Slider(0, 100, label="Top K", step=1, value=40)
                 repeat_penalty = gr.Slider(0.0, 2.0, label="Repetition Penalty", step=0.1, value=1.1)

         system_msg = gr.Textbox(
             start_message, label="System Message", interactive=False, visible=False)

         chat_history_state = gr.State()
-        clear.click(clear_chat, inputs=[chat_history_state, message], outputs=[chat_history_state, message])
+        clear.click(clear_chat, inputs=[chat_history_state, message], outputs=[chat_history_state, message], queue=False)
         clear.click(lambda: None, None, chatbot, queue=False)

         submit_click_event = submit.click(
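The other behavioural change is that the clear_chat handler on the Clear button now runs with queue=False, matching the second handler that empties the Chatbot. A self-contained sketch of that wiring, with a hypothetical clear_chat stub standing in for the real function defined elsewhere in tabbed.py, assuming Gradio is installed:

import gradio as gr

def clear_chat(chat_history_state, chat_message):
    # Hypothetical stub matching the signature used in tabbed.py:
    # reset the stored history and empty the message textbox.
    return [], ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    message = gr.Textbox(label="Message")
    clear = gr.Button("Clear")
    chat_history_state = gr.State()

    # Both click handlers skip the request queue so the UI clears immediately:
    # the first resets the stored state and textbox, the second empties the Chatbot.
    clear.click(clear_chat,
                inputs=[chat_history_state, message],
                outputs=[chat_history_state, message],
                queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()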