Update README.md
README.md
CHANGED
@@ -6,7 +6,19 @@ tags:
 - gguf
 - latxa
 - 7b
+- hitz
+- llama
+model_name: latxa-7b-v1
+base_model: HiTZ/latxa-7b-v1
 ---
 
 # Latxa 7b GGUF
 
+
+## Provided files
+
+| Name | Quant method | Bits | Size | Max RAM required | Use case |
+| ---- | ---- | ---- | ---- | ---- | ----- |
+| [latxa-7b-v1.gguf](https://huggingface.co/xezpeleta/latxa-7b-v1-gguf/blob/main/latxa-7b-v1.gguf) | | | 26 GB | | |
+| [latxa-7b-v1-f16.gguf](https://huggingface.co/xezpeleta/latxa-7b-v1-gguf/blob/main/latxa-7b-v1-f16.gguf) | | | 13 GB | | |
+| [latxa-7b-v1-q8_0.gguf](https://huggingface.co/xezpeleta/latxa-7b-v1-gguf/blob/main/latxa-7b-v1-q8_0.gguf) | Q8_0 | | 6.7 GB | | |
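As context for the table added above, here is a minimal sketch of downloading the Q8_0 file from the linked repo and running it locally. It assumes the `huggingface_hub` and `llama-cpp-python` packages; the repo id and filenames come from the table links, while the context size and the Basque prompt are placeholder assumptions, not part of the original README.

```python
# Sketch: fetch the Q8_0 GGUF listed in the "Provided files" table and run a
# short completion with llama-cpp-python. Packages, n_ctx, and the prompt are
# assumptions for illustration only.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

model_path = hf_hub_download(
    repo_id="xezpeleta/latxa-7b-v1-gguf",
    filename="latxa-7b-v1-q8_0.gguf",
)

llm = Llama(model_path=model_path, n_ctx=2048)
result = llm("Kaixo! Nor zara?", max_tokens=64)  # placeholder Basque prompt
print(result["choices"][0]["text"])
```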