Spaces: Running on Zero
Upload llmdolphin.py
llmdolphin.py  +4 -0  CHANGED
@@ -24,13 +24,17 @@ llm_models = {
     "Nemo-12B-Marlin-v4.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v4-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v5-Q4_K_M.gguf": ["starble-dev/Nemo-12B-Marlin-v5-GGUF", MessagesFormatterType.CHATML],
     "magnum-12b-v2.i1-Q4_K_M.gguf": ["mradermacher/magnum-12b-v2-i1-GGUF", MessagesFormatterType.CHATML],
+    "Orthrus-12b-v0.8.Q4_K_M.gguf": ["mradermacher/Orthrus-12b-v0.8-GGUF", MessagesFormatterType.CHATML],
     "L3-bluuwhale-SAO-MIX-8B-V1_fp32-merge-calc.Q5_K_M.gguf": ["mradermacher/L3-bluuwhale-SAO-MIX-8B-V1_fp32-merge-calc-GGUF", MessagesFormatterType.LLAMA_3],
     "YetAnotherMerge-v0.5.Q4_K_M.gguf": ["mradermacher/YetAnotherMerge-v0.5-GGUF", MessagesFormatterType.CHATML],
     "open-hermes-sd-finetune-erot-story.Q5_K_M.gguf": ["mradermacher/open-hermes-sd-finetune-erot-story-GGUF", MessagesFormatterType.CHATML],
+    "OntologyHermes-2.5-Mistral-7B.Q6_K.gguf": ["mradermacher/OntologyHermes-2.5-Mistral-7B-GGUF", MessagesFormatterType.MISTRAL],
     "cosmic-2.i1-Q5_K_M.gguf": ["mradermacher/cosmic-2-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Mistral-Nemo-Instruct-2407.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-Instruct-2407-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Ellaria-9B.i1-Q4_K_M.gguf": ["mradermacher/Ellaria-9B-i1-GGUF", MessagesFormatterType.ALPACA],
     "Apollo-0.4-Llama-3.1-8B.i1-Q5_K_M.gguf": ["mradermacher/Apollo-0.4-Llama-3.1-8B-i1-GGUF", MessagesFormatterType.MISTRAL],
     "NemoRemix-12B.Q4_K_M.gguf": ["mradermacher/NemoRemix-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Viviana_V3.i1-Q5_K_M.gguf": ["mradermacher/Viviana_V3-i1-GGUF", MessagesFormatterType.MISTRAL],
     "dolphin-2.9.4-llama3.1-8b.i1-Q5_K_M.gguf": ["mradermacher/dolphin-2.9.4-llama3.1-8b-i1-GGUF", MessagesFormatterType.CHATML],
     "L3-SAO-MIX-8B-V1.i1-Q5_K_M.gguf": ["mradermacher/L3-SAO-MIX-8B-V1-i1-GGUF", MessagesFormatterType.LLAMA_3],
     "bestofllama3-8b-stock-q5_k_m.gguf": ["bunnycore/BestofLLama3-8B-stock-Q5_K_M-GGUF", MessagesFormatterType.LLAMA_3],
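The llm_models registry maps a GGUF filename to a [Hugging Face repo id, MessagesFormatterType] pair, so each of the four entries added here only needs the filename, the repo to fetch it from, and the chat template the model expects. Below is a minimal sketch of how such an entry might be consumed. The loading code is not part of this diff, so the use of hf_hub_download, llama_cpp.Llama, LlamaCppPythonProvider, and LlamaCppAgent is an assumption based on the usual llama-cpp-agent workflow, not the Space's actual implementation.

# Sketch: resolving one registry entry to a ready-to-chat agent.
# NOTE: everything below is assumed; only the dict shape comes from the diff above.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider

llm_models = {
    # One of the four entries added in this commit: filename -> [repo_id, formatter].
    "Orthrus-12b-v0.8.Q4_K_M.gguf": ["mradermacher/Orthrus-12b-v0.8-GGUF", MessagesFormatterType.CHATML],
}

def load_agent(filename: str) -> LlamaCppAgent:
    repo_id, formatter = llm_models[filename]
    # Fetch the quantized GGUF file from the Hugging Face Hub.
    model_path = hf_hub_download(repo_id=repo_id, filename=filename)
    # Wrap llama.cpp in the provider interface that llama-cpp-agent expects.
    provider = LlamaCppPythonProvider(Llama(model_path=model_path, n_ctx=4096))
    # The registry's MessagesFormatterType selects the matching chat template
    # (CHATML, MISTRAL, LLAMA_3, ALPACA, ...) for the model.
    return LlamaCppAgent(provider, predefined_messages_formatter_type=formatter)

agent = load_agent("Orthrus-12b-v0.8.Q4_K_M.gguf")
print(agent.get_chat_response("Hello!"))

All four new entries follow the same pattern as the existing ones, differing only in repo, quantization level, and which formatter (CHATML or MISTRAL) they pair with.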