RichardErkhov
committed on
Commit
•
4ddfd72
1
Parent(s):
63614a3
uploaded readme
Browse files
README.md
CHANGED
@@ -18,7 +18,7 @@ gemma-2-2b - GGUF
|
|
18 |
| [gemma-2-2b.IQ3_XS.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.IQ3_XS.gguf) | IQ3_XS | 1.22GB |
|
19 |
| [gemma-2-2b.IQ3_S.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.IQ3_S.gguf) | IQ3_S | 1.27GB |
|
20 |
| [gemma-2-2b.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K_S.gguf) | Q3_K_S | 1.27GB |
|
21 |
-
| [gemma-2-2b.IQ3_M.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.IQ3_M.gguf) | IQ3_M |
|
22 |
| [gemma-2-2b.Q3_K.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K.gguf) | Q3_K | 1.36GB |
|
23 |
| [gemma-2-2b.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K_M.gguf) | Q3_K_M | 1.36GB |
|
24 |
| [gemma-2-2b.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K_L.gguf) | Q3_K_L | 1.44GB |
|
@@ -31,7 +31,7 @@ gemma-2-2b - GGUF
|
|
31 |
| [gemma-2-2b.Q4_1.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q4_1.gguf) | Q4_1 | 1.64GB |
|
32 |
| [gemma-2-2b.Q5_0.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_0.gguf) | Q5_0 | 1.75GB |
|
33 |
| [gemma-2-2b.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_K_S.gguf) | Q5_K_S | 1.75GB |
|
34 |
-
| [gemma-2-2b.Q5_K.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_K.gguf) | Q5_K |
|
35 |
| [gemma-2-2b.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_K_M.gguf) | Q5_K_M | 1.79GB |
|
36 |
| [gemma-2-2b.Q5_1.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_1.gguf) | Q5_1 | 1.87GB |
|
37 |
| [gemma-2-2b.Q6_K.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q6_K.gguf) | Q6_K | 2.0GB |
|
|
|
18 |
| [gemma-2-2b.IQ3_XS.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.IQ3_XS.gguf) | IQ3_XS | 1.22GB |
|
19 |
| [gemma-2-2b.IQ3_S.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.IQ3_S.gguf) | IQ3_S | 1.27GB |
|
20 |
| [gemma-2-2b.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K_S.gguf) | Q3_K_S | 1.27GB |
|
21 |
+
| [gemma-2-2b.IQ3_M.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.IQ3_M.gguf) | IQ3_M | 0.46GB |
|
22 |
| [gemma-2-2b.Q3_K.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K.gguf) | Q3_K | 1.36GB |
|
23 |
| [gemma-2-2b.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K_M.gguf) | Q3_K_M | 1.36GB |
|
24 |
| [gemma-2-2b.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q3_K_L.gguf) | Q3_K_L | 1.44GB |
|
|
|
31 |
| [gemma-2-2b.Q4_1.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q4_1.gguf) | Q4_1 | 1.64GB |
|
32 |
| [gemma-2-2b.Q5_0.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_0.gguf) | Q5_0 | 1.75GB |
|
33 |
| [gemma-2-2b.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_K_S.gguf) | Q5_K_S | 1.75GB |
|
34 |
+
| [gemma-2-2b.Q5_K.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_K.gguf) | Q5_K | 0.74GB |
|
35 |
| [gemma-2-2b.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_K_M.gguf) | Q5_K_M | 1.79GB |
|
36 |
| [gemma-2-2b.Q5_1.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q5_1.gguf) | Q5_1 | 1.87GB |
|
37 |
| [gemma-2-2b.Q6_K.gguf](https://huggingface.co/RichardErkhov/google_-_gemma-2-2b-gguf/blob/main/gemma-2-2b.Q6_K.gguf) | Q6_K | 2.0GB |
|