Spaces: Running on Zero
Running on Zero
Commit: Upload llmdolphin.py
Browse files — llmdolphin.py: +8 −5
llmdolphin.py
CHANGED
|
@@ -6,6 +6,7 @@ from llama_cpp_agent.providers import LlamaCppPythonProvider
|
|
| 6 |
from llama_cpp_agent.chat_history import BasicChatHistory
|
| 7 |
from llama_cpp_agent.chat_history.messages import Roles
|
| 8 |
from ja_to_danbooru.ja_to_danbooru import jatags_to_danbooru_tags
|
|
|
|
| 9 |
|
| 10 |
|
| 11 |
llm_models_dir = "./llm_models"
|
|
@@ -127,6 +128,7 @@ def list_uniq(l):
|
|
| 127 |
return sorted(set(l), key=l.index)
|
| 128 |
|
| 129 |
|
|
|
|
| 130 |
def to_list_ja(s):
|
| 131 |
import re
|
| 132 |
s = re.sub(r'[γγ]', ',', s)
|
|
@@ -359,6 +361,7 @@ def select_dolphin_language(lang: str):
|
|
| 359 |
return gr.update(value=get_dolphin_sysprompt())
|
| 360 |
|
| 361 |
|
|
|
|
| 362 |
def get_raw_prompt(msg: str):
|
| 363 |
import re
|
| 364 |
m = re.findall(r'/GENBEGIN/(.+?)/GENEND/', msg, re.DOTALL)
|
|
@@ -443,12 +446,13 @@ def dolphin_respond(
|
|
| 443 |
def dolphin_parse(
|
| 444 |
history: list[tuple[str, str]],
|
| 445 |
):
|
| 446 |
-
if dolphin_sysprompt_mode == "Chat with LLM" or not history or len(history) < 1:
|
|
|
|
| 447 |
try:
|
| 448 |
msg = history[-1][0]
|
|
|
|
| 449 |
except Exception:
|
| 450 |
-
return ""
|
| 451 |
-
raw_prompt = get_raw_prompt(msg)
|
| 452 |
prompts = []
|
| 453 |
if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
|
| 454 |
prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit"])
|
|
@@ -471,7 +475,6 @@ def dolphin_respond_auto(
|
|
| 471 |
progress=gr.Progress(track_tqdm=True),
|
| 472 |
):
|
| 473 |
#if not is_japanese(message): return [(None, None)]
|
| 474 |
-
|
| 475 |
from pathlib import Path
|
| 476 |
progress(0, desc="Processing...")
|
| 477 |
|
|
@@ -543,9 +546,9 @@ def dolphin_parse_simple(
|
|
| 543 |
if dolphin_sysprompt_mode == "Chat with LLM" or not history or len(history) < 1: return message
|
| 544 |
try:
|
| 545 |
msg = history[-1][0]
|
|
|
|
| 546 |
except Exception:
|
| 547 |
return ""
|
| 548 |
-
raw_prompt = get_raw_prompt(msg)
|
| 549 |
prompts = []
|
| 550 |
if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
|
| 551 |
prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit"])
|
|
|
|
| 6 |
from llama_cpp_agent.chat_history import BasicChatHistory
|
| 7 |
from llama_cpp_agent.chat_history.messages import Roles
|
| 8 |
from ja_to_danbooru.ja_to_danbooru import jatags_to_danbooru_tags
|
| 9 |
+
import wrapt_timeout_decorator
|
| 10 |
|
| 11 |
|
| 12 |
llm_models_dir = "./llm_models"
|
|
|
|
| 128 |
return sorted(set(l), key=l.index)
|
| 129 |
|
| 130 |
|
| 131 |
+
@wrapt_timeout_decorator.timeout(dec_timeout=3.5)
|
| 132 |
def to_list_ja(s):
|
| 133 |
import re
|
| 134 |
s = re.sub(r'[γγ]', ',', s)
|
|
|
|
| 361 |
return gr.update(value=get_dolphin_sysprompt())
|
| 362 |
|
| 363 |
|
| 364 |
+
@wrapt_timeout_decorator.timeout(dec_timeout=5.0)
|
| 365 |
def get_raw_prompt(msg: str):
|
| 366 |
import re
|
| 367 |
m = re.findall(r'/GENBEGIN/(.+?)/GENEND/', msg, re.DOTALL)
|
|
|
|
| 446 |
def dolphin_parse(
|
| 447 |
history: list[tuple[str, str]],
|
| 448 |
):
|
| 449 |
+
if dolphin_sysprompt_mode == "Chat with LLM" or not history or len(history) < 1:
|
| 450 |
+
return "", gr.update(visible=True), gr.update(visible=True)
|
| 451 |
try:
|
| 452 |
msg = history[-1][0]
|
| 453 |
+
raw_prompt = get_raw_prompt(msg)
|
| 454 |
except Exception:
|
| 455 |
+
return "", gr.update(visible=True), gr.update(visible=True)
|
|
|
|
| 456 |
prompts = []
|
| 457 |
if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
|
| 458 |
prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit"])
|
|
|
|
| 475 |
progress=gr.Progress(track_tqdm=True),
|
| 476 |
):
|
| 477 |
#if not is_japanese(message): return [(None, None)]
|
|
|
|
| 478 |
from pathlib import Path
|
| 479 |
progress(0, desc="Processing...")
|
| 480 |
|
|
|
|
| 546 |
if dolphin_sysprompt_mode == "Chat with LLM" or not history or len(history) < 1: return message
|
| 547 |
try:
|
| 548 |
msg = history[-1][0]
|
| 549 |
+
raw_prompt = get_raw_prompt(msg)
|
| 550 |
except Exception:
|
| 551 |
return ""
|
|
|
|
| 552 |
prompts = []
|
| 553 |
if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
|
| 554 |
prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit"])
|