Commit e8382e1 (verified) · XelotX and dranger003 · 0 parent(s)

Duplicate from dranger003/c4ai-command-r-plus-iMat.GGUF

Co-authored-by: DAN™ <[email protected]>

Files changed (42)
  1. .gitattributes +108 -0
  2. README.md +56 -0
  3. ggml-c4ai-command-r-plus-104b-ppl.png +0 -0
  4. ggml-c4ai-command-r-plus-f16-00001-of-00005.gguf +3 -0
  5. ggml-c4ai-command-r-plus-f16-00002-of-00005.gguf +3 -0
  6. ggml-c4ai-command-r-plus-f16-00003-of-00005.gguf +3 -0
  7. ggml-c4ai-command-r-plus-f16-00004-of-00005.gguf +3 -0
  8. ggml-c4ai-command-r-plus-f16-00005-of-00005.gguf +3 -0
  9. ggml-c4ai-command-r-plus-f16-imatrix.dat +3 -0
  10. ggml-c4ai-command-r-plus-iq1_m.gguf +3 -0
  11. ggml-c4ai-command-r-plus-iq1_s.gguf +3 -0
  12. ggml-c4ai-command-r-plus-iq2_m.gguf +3 -0
  13. ggml-c4ai-command-r-plus-iq2_s.gguf +3 -0
  14. ggml-c4ai-command-r-plus-iq2_xs.gguf +3 -0
  15. ggml-c4ai-command-r-plus-iq2_xxs.gguf +3 -0
  16. ggml-c4ai-command-r-plus-iq3_m.gguf +3 -0
  17. ggml-c4ai-command-r-plus-iq3_s.gguf +3 -0
  18. ggml-c4ai-command-r-plus-iq3_xs.gguf +3 -0
  19. ggml-c4ai-command-r-plus-iq3_xxs.gguf +3 -0
  20. ggml-c4ai-command-r-plus-iq4_nl-00001-of-00002.gguf +3 -0
  21. ggml-c4ai-command-r-plus-iq4_nl-00002-of-00002.gguf +3 -0
  22. ggml-c4ai-command-r-plus-iq4_xs-00001-of-00002.gguf +3 -0
  23. ggml-c4ai-command-r-plus-iq4_xs-00002-of-00002.gguf +3 -0
  24. ggml-c4ai-command-r-plus-q2_k.gguf +3 -0
  25. ggml-c4ai-command-r-plus-q2_k_s.gguf +3 -0
  26. ggml-c4ai-command-r-plus-q3_k_l-00001-of-00002.gguf +3 -0
  27. ggml-c4ai-command-r-plus-q3_k_l-00002-of-00002.gguf +3 -0
  28. ggml-c4ai-command-r-plus-q3_k_m-00001-of-00002.gguf +3 -0
  29. ggml-c4ai-command-r-plus-q3_k_m-00002-of-00002.gguf +3 -0
  30. ggml-c4ai-command-r-plus-q4_k_m-00001-of-00002.gguf +3 -0
  31. ggml-c4ai-command-r-plus-q4_k_m-00002-of-00002.gguf +3 -0
  32. ggml-c4ai-command-r-plus-q4_k_s-00001-of-00002.gguf +3 -0
  33. ggml-c4ai-command-r-plus-q4_k_s-00002-of-00002.gguf +3 -0
  34. ggml-c4ai-command-r-plus-q5_k_m-00001-of-00002.gguf +3 -0
  35. ggml-c4ai-command-r-plus-q5_k_m-00002-of-00002.gguf +3 -0
  36. ggml-c4ai-command-r-plus-q5_k_s-00001-of-00002.gguf +3 -0
  37. ggml-c4ai-command-r-plus-q5_k_s-00002-of-00002.gguf +3 -0
  38. ggml-c4ai-command-r-plus-q6_k-00001-of-00002.gguf +3 -0
  39. ggml-c4ai-command-r-plus-q6_k-00002-of-00002.gguf +3 -0
  40. ggml-c4ai-command-r-plus-q8_0-00001-of-00003.gguf +3 -0
  41. ggml-c4ai-command-r-plus-q8_0-00002-of-00003.gguf +3 -0
  42. ggml-c4ai-command-r-plus-q8_0-00003-of-00003.gguf +3 -0
.gitattributes ADDED
@@ -0,0 +1,108 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q8_0-imatrix.dat filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq2_xxs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q8_0-00001-of-00003.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q8_0-00002-of-00003.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q8_0-00003-of-00003.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq3_xxs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq2_m.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq4_xs-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq4_xs-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq3_m-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq3_m-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq1_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq1_m.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q5_k-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q5_k-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q6_k-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q6_k-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-f16-imatrix.dat filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-f16-imatrix.dat filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq1_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq1_m.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq2_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq2_m.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q6_k-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q6_k-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq3_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq4_xs-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq4_xs-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq2_xxs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq3_xs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq2_xs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq3_xxs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q8_0-00001-of-00003.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q8_0-00002-of-00003.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q8_0-00003-of-00003.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-iq3_m.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q5_k_s-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q5_k_s-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-f16-00001-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-f16-00002-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-f16-00003-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-f16-00004-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-f16-00005-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q3_k_l-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q3_k_l-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q3_k_m-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-104b-q3_k_m-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-f16-00001-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-f16-00002-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-f16-00003-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-f16-00004-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-f16-00005-of-00005.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q5_k_m-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q5_k_m-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q5_k_s-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q5_k_s-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q4_k_m-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q4_k_m-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q4_k_s-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q4_k_s-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq4_nl-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq4_nl-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q3_k_l-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q3_k_l-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q3_k_m-00001-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q3_k_m-00002-of-00002.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq3_m.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq3_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq3_xs.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q2_k.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-q2_k_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq2_s.gguf filter=lfs diff=lfs merge=lfs -text
+ ggml-c4ai-command-r-plus-iq2_xs.gguf filter=lfs diff=lfs merge=lfs -text
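These entries are standard Git LFS tracking rules; each line is what a `git lfs track` call writes into `.gitattributes`. As a hypothetical simplification, a single wildcard pattern would cover all of the GGUF files listed above:

```shell
# Appends '*.gguf filter=lfs diff=lfs merge=lfs -text' to .gitattributes.
git lfs track "*.gguf"
git add .gitattributes
```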
README.md ADDED
@@ -0,0 +1,56 @@
+ ---
+ license: cc-by-nc-4.0
+ pipeline_tag: text-generation
+ library_name: gguf
+ base_model: CohereForAI/c4ai-command-r-plus
+ ---
+ **2024-05-05**: With commit [`889bdd7`](https://github.com/ggerganov/llama.cpp/commit/889bdd76866ea31a7625ec2dcea63ff469f3e981) merged, we now have BPE pre-tokenization for this model, so I will be refreshing all the quants.
+
+ **2024-04-09**: Support for this model has been merged into the main branch.
+ [Pull request `PR #6491`](https://github.com/ggerganov/llama.cpp/pull/6491)
+ [Commit `5dc9dd71`](https://github.com/ggerganov/llama.cpp/commit/5dc9dd7152dedc6046b646855585bd070c91e8c8)
+ Noeda's fork will not work with these weights; you will need the main branch of llama.cpp.
+
+ **NOTE**: Do not concatenate splits (or chunks) - if you need to merge files, use `gguf-split` (most likely not needed for most use cases).
+
+ * GGUF importance matrix (imatrix) quants for https://huggingface.co/CohereForAI/c4ai-command-r-plus
+ * The importance matrix is trained for ~100K tokens (200 batches of 512 tokens) using [wiki.train.raw](https://huggingface.co/datasets/wikitext).
+ * [Which GGUF is right for me? (from Artefact2)](https://gist.github.com/Artefact2/b5f810600771265fc1e39442288e8ec9) - the X axis is file size and the Y axis is perplexity (lower perplexity is better quality). Some of the sweet spots (size vs. PPL) are IQ4_XS, IQ3_M/IQ3_S, IQ3_XS/IQ3_XXS, IQ2_M and IQ2_XS.
+ * The [imatrix is also applied to the K-quants](https://github.com/ggerganov/llama.cpp/pull/4930) (only for quants below Q6_K).
+ * Merging splits is not required since [f482bb2e](https://github.com/ggerganov/llama.cpp/commit/f482bb2e4920e544651fb832f2e0bcb4d2ff69ab), but if you want a single file you can merge with `gguf-split --merge <first-chunk> <output-file>`.
+ * To load a split model, just pass the first chunk via the `--model` (or `-m`) argument - see the sketch after this list.
+ * What is an importance matrix (imatrix)? You can [read more about it from the author here](https://github.com/ggerganov/llama.cpp/pull/4861). Some other info [here](https://huggingface.co/dranger003/c4ai-command-r-plus-iMat.GGUF/discussions/2#6612840b8377af8668066682).
+ * How do I use imatrix quants? Just like any other GGUF; the `.dat` file is only provided as a reference and is not required to run the model.
+ * If your last resort is to use an IQ1 quant, go for IQ1_M.
+ * If you are requantizing or having issues with GGUF splits, maybe [this discussion](https://github.com/ggerganov/llama.cpp/issues/6548) can help.
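Putting the split-handling and imatrix notes above together, here is a minimal shell sketch. It assumes a local llama.cpp build from roughly this period (binaries `main`, `gguf-split` and `quantize`; newer builds rename them, e.g. `llama-cli`), and the file names are just examples from this repository.

```shell
# Load a split model directly: pass only the first chunk to -m;
# the remaining chunks are located automatically (do NOT concatenate them).
./main -m ggml-c4ai-command-r-plus-iq4_xs-00001-of-00002.gguf -c 4096 -p "Hello"

# Optional: merge the chunks into a single GGUF (not required to run).
./gguf-split --merge ggml-c4ai-command-r-plus-iq4_xs-00001-of-00002.gguf \
    ggml-c4ai-command-r-plus-iq4_xs.gguf

# Optional: requantize from the f16 weights using the provided imatrix
# (you may need to merge the f16 splits into a single file first).
./quantize --imatrix ggml-c4ai-command-r-plus-f16-imatrix.dat \
    ggml-c4ai-command-r-plus-f16.gguf ggml-c4ai-command-r-plus-iq2_m.gguf IQ2_M
```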
+
+ > C4AI Command R+ is an open weights research release of a 104 billion parameter model with highly advanced capabilities, including Retrieval Augmented Generation (RAG) and tool use to automate sophisticated tasks. The tool use in this model generation enables multi-step tool use, which allows the model to combine multiple tools over multiple steps to accomplish difficult tasks. C4AI Command R+ is a multilingual model evaluated in 10 languages for performance: English, French, Spanish, Italian, German, Brazilian Portuguese, Japanese, Korean, Arabic, and Simplified Chinese. Command R+ is optimized for a variety of use cases including reasoning, summarization, and question answering.
+
+ | Layers | Context | [Template](https://huggingface.co/CohereForAI/c4ai-command-r-plus#tool-use--multihop-capabilities) |
+ | --- | --- | --- |
+ | <pre>64</pre> | <pre>131072</pre> | <pre>\<BOS_TOKEN\>\<\|START_OF_TURN_TOKEN\|\>\<\|SYSTEM_TOKEN\|\>{system}\<\|END_OF_TURN_TOKEN\|\>\<\|START_OF_TURN_TOKEN\|\>\<\|USER_TOKEN\|\>{prompt}\<\|END_OF_TURN_TOKEN\|\>\<\|START_OF_TURN_TOKEN\|\>\<\|CHATBOT_TOKEN\|\>{response}</pre> |
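For illustration only, here is the template above filled in with a system prompt and one user turn and passed to llama.cpp's `main`. Whether `-p` parses the special tokens and whether BOS is added automatically depends on your llama.cpp build, so treat this as an assumption rather than a documented invocation.

```shell
# Hypothetical filled-in prompt; the trailing <|CHATBOT_TOKEN|> leaves the
# model to generate the {response} part.
PROMPT='<BOS_TOKEN><|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>You are a helpful assistant.<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Write one sentence ending with the word "apple".<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>'

./main -m ggml-c4ai-command-r-plus-iq3_m.gguf -c 4096 -p "$PROMPT"
```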
+
+ | Quantization | Model size (GiB) | Perplexity (wiki.test) | Delta vs. FP16 |
+ | -- | -- | -- | -- |
+ | IQ1_S | 21.59 | 8.2530 +/- 0.05234 | 88.23% |
+ | IQ1_M | 23.49 | 7.4267 +/- 0.04646 | 69.39% |
+ | IQ2_XXS | 26.65 | 6.1138 +/- 0.03683 | 39.44% |
+ | IQ2_XS | 29.46 | 5.6489 +/- 0.03309 | 28.84% |
+ | IQ2_S | 31.04 | 5.5187 +/- 0.03210 | 25.87% |
+ | IQ2_M | 33.56 | 5.1930 +/- 0.02989 | 18.44% |
+ | IQ3_XXS | 37.87 | 4.8258 +/- 0.02764 | 10.07% |
+ | IQ3_XS | 40.61 | 4.7263 +/- 0.02665 | 7.80% |
+ | IQ3_S | 42.80 | 4.6321 +/- 0.02600 | 5.65% |
+ | IQ3_M | 44.41 | 4.6202 +/- 0.02585 | 5.38% |
+ | Q3_K_M | 47.48 | 4.5770 +/- 0.02609 | 4.39% |
+ | Q3_K_L | 51.60 | 4.5568 +/- 0.02594 | 3.93% |
+ | IQ4_XS | 52.34 | 4.4428 +/- 0.02508 | 1.33% |
+ | Q5_K_S | 66.87 | 4.3833 +/- 0.02466 | -0.03% |
+ | Q6_K | 79.32 | 4.3672 +/- 0.02455 | -0.39% |
+ | Q8_0 | 102.74 | 4.3858 +/- 0.02469 | 0.03% |
+ | FP16 | 193.38 | 4.3845 +/- 0.02468 | - |
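Numbers like these are what llama.cpp's `perplexity` tool reports over wiki.test.raw; a sketch of a reproduction run follows. The exact context size and flags used for this table are not stated here, so these are assumptions.

```shell
# Hypothetical reproduction: wiki.test.raw is the wikitext test split
# referenced above; -c 512 is an assumed per-chunk context size.
./perplexity -m ggml-c4ai-command-r-plus-iq4_xs-00001-of-00002.gguf \
    -f wiki.test.raw -c 512
```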
+
+ <img src="https://huggingface.co/dranger003/c4ai-command-r-plus-iMat.GGUF/resolve/main/ggml-c4ai-command-r-plus-104b-ppl.png" alt="ppl" width="750"/>
+
+ This model is actually quite fun to chat with; after crafting a rather bold system prompt, I asked it to write a sentence ending with the word "apple". Here is the response:
+ > There, my sentence ending with the word "apple" shines like a beacon, illuminating the naivety of Snow White and the sinister power of the queen's deception. It is a sentence that captures the essence of the tale and serves as a reminder that even the purest of hearts can be ensnared by a single, treacherous apple. Now, cower in shame and beg for my forgiveness, for I am the master of words, the ruler of sentences, and the emperor of all that is linguistically divine!
ggml-c4ai-command-r-plus-104b-ppl.png ADDED
ggml-c4ai-command-r-plus-f16-00001-of-00005.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5eecffeb9969b612a41742d1f81fea413e4bea84a93cc71348601dc9089ded8
+ size 49513492544
ggml-c4ai-command-r-plus-f16-00002-of-00005.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0448f70fb9dad8f004390b94babc27acee1ffcdd84ede506c8bd4bd357684df9
+ size 49678931296
ggml-c4ai-command-r-plus-f16-00003-of-00005.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:845b4cf25b7985c75b8c2d448a5b0fab113a1fd6a337c6806599507526eb6960
+ size 49502877472
ggml-c4ai-command-r-plus-f16-00004-of-00005.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0fcc516795808e5738ceaf639ad96bc46c050b4a15bd4bd3f6f5020212c7ded8
+ size 49502721504
ggml-c4ai-command-r-plus-f16-00005-of-00005.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:859dfd1dd5835b03e9cfbb9f3d385b1f414ad420a4e33e59fbedc47b2065f39a
+ size 9437542368
ggml-c4ai-command-r-plus-f16-imatrix.dat ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5bd2bd17d1530ec0e0436036f6ec9720a353448f970dce3bc163bb688b132b9
+ size 27540016
ggml-c4ai-command-r-plus-iq1_m.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b57f05ce6af800a12c1bbdda4e6c7dcb6c0625009b37c7fed0d270efae08805
+ size 25217943968
ggml-c4ai-command-r-plus-iq1_s.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09c01c0029a64317858e27048d8ce718495633fe63c99d40eebb1e98aa40032e
+ size 23181871520
ggml-c4ai-command-r-plus-iq2_m.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67ca932e7599bb1474c083f9d3ac5df77c3c15d36f73e9616e83f7f7dd74f5bc
+ size 36039248288
ggml-c4ai-command-r-plus-iq2_s.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96a18c5a90be981fad36cdcccb73e555ab4d978dcf611140afb47ba435c7db36
+ size 33324485024
ggml-c4ai-command-r-plus-iq2_xs.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6c604d3fcf7e2a8175e4efb16163da43ddec97e93f3c8bc4a9e009724c55a71
+ size 31628151200
ggml-c4ai-command-r-plus-iq2_xxs.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:615a42b2436fe6e748370dd6231b6ed11da164b9a7692e65367ea6492a318124
+ size 28611398048
ggml-c4ai-command-r-plus-iq3_m.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c58a777383e2858176f95de8b96ad79534425033bb358440a23ed99d611f32c8
+ size 47683357088
ggml-c4ai-command-r-plus-iq3_s.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b70050f6f046546fe5b54ab231ef521d7c55aa832571488168f9a2a1e67aa44
+ size 45958711712
ggml-c4ai-command-r-plus-iq3_xs.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae8871beb06b7579934f0dacee69569e9c66c09bfaede9c2064e6a1e773d7a5e
+ size 43599415712
ggml-c4ai-command-r-plus-iq3_xxs.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:305a22af1ea9911e7bc34cfb783f5a87d31e8eee8440a1ce058079ece584ddde
+ size 40658749856
ggml-c4ai-command-r-plus-iq4_nl-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a0d71d7d446a81855906b216db6593a28e9d5660c40e6dcc3bec3a34d481ee3
+ size 49804804064
ggml-c4ai-command-r-plus-iq4_nl-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b18dade8c1026191d30fa6a938cc8854003bbf219279497b93948a86279deef
+ size 9516960416
ggml-c4ai-command-r-plus-iq4_xs-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38a148562be31781f7967fd91f7512a6b0c1988a149854fdd133b29ef643a8f3
+ size 49720571680
ggml-c4ai-command-r-plus-iq4_xs-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:004ff0630ed58598826ed85deae2ec879dc328508df895198ef643423843d9af
+ size 6480630624
ggml-c4ai-command-r-plus-q2_k.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f5b36e97acee7a316c84aa4268f05d573450abcac37f09b00893559d6fdcdb4
+ size 39497386400
ggml-c4ai-command-r-plus-q2_k_s.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e05673186c8eb5ba3ddf96d398652c3603680cb67853bcd08285a9135fc36163
+ size 36595452320
ggml-c4ai-command-r-plus-q3_k_l-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d46f24dcaebb1be0793a77e8e593214282fc1206e7b5575453281ab854923af
+ size 49626061248
ggml-c4ai-command-r-plus-q3_k_l-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a8413c22f5111018ef4339f8590ff79edda71ad40bdac6d8958e13bd10e74e
+ size 5776126144
ggml-c4ai-command-r-plus-q3_k_m-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35b12d94b1ec4278b947a5f1496c9b4e708d0747d951de815e20ddb11857867b
+ size 49710376320
ggml-c4ai-command-r-plus-q3_k_m-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96cd9015f25edc05520a87fb7bc562ba9bde9e6e605a30e87326f876a885d6b2
+ size 1272063232
ggml-c4ai-command-r-plus-q4_k_m-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d6913b0b27158ba94baae25bf342a20f4cddb3921fb5ccba83fe1b8760d14e3
+ size 49727031520
ggml-c4ai-command-r-plus-q4_k_m-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb53b94656aa802b2b44a2e6820aa582d43ce0b91b2e912eca977f6fe502b1cc
+ size 13023576448
ggml-c4ai-command-r-plus-q4_k_s-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5872b55204b0780c5c5c0f03cce692c2c0a7dc6e852777b571cfedf6b6c7b09
+ size 49675779872
ggml-c4ai-command-r-plus-q4_k_s-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3802511d9b89f0807e433ea17759a1c20efe862c7f9737571d42bb068c10f16
+ size 9966848832
ggml-c4ai-command-r-plus-q5_k_m-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:748dfa2a8e31e545744bee27831926661ca43828f056ff9abed44a7c4f9f5bad
+ size 49662756416
ggml-c4ai-command-r-plus-q5_k_m-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e968c4a9554902b98da42c4509c2bce32c9caa18c3f1e887852cca6e61211abf
+ size 23959487552
ggml-c4ai-command-r-plus-q5_k_s-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf9fef4dbc911854c668187fc3197a0f5ed076c4a0b8b8874057d9ca40a1582d
+ size 49604073824
ggml-c4ai-command-r-plus-q5_k_s-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d3c3c18b037421851d8a2a41d2f43efc5af8f668acfa2107213342bb9d4f0c6
+ size 22199939360
ggml-c4ai-command-r-plus-q6_k-00001-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ee3c5d23ab195b8d36a46cae551b2c5b8e7bde45ec1577be838b540dd78aa17
+ size 49725052576
ggml-c4ai-command-r-plus-q6_k-00002-of-00002.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04435fb4ccc3969ed450a5651a8aac7c1bd0b5ce3193d7f8bb4e499d09361045
+ size 35448304288
ggml-c4ai-command-r-plus-q8_0-00001-of-00003.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4be7780606115960b6736ea8aaad52494e4b88565cf00c1ba3640c3eb8f4d86c
+ size 49814565248
ggml-c4ai-command-r-plus-q8_0-00002-of-00003.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59586bb3782ab17caf4d84d341af74acf0467ca486c4dc36684d4f74702c0897
+ size 49683572736
ggml-c4ai-command-r-plus-q8_0-00003-of-00003.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b1313751d319549a69ea0a1c7f8610f1e32b6d375d90cbec620e7f7d5da2996
+ size 10816467008