{
"model": "Llama",
"quantization": "Q5_1",
"quantization_version": "V2",
"container": "GGJT",
"converter": "llm-rs",
"hash": "c04ee7cecdf39c50d581fb6656b2c189db9e3b306d1d786fcc310d64a6297ae2",
"base_model": "openlm-research/open_llama_3b"
}
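A minimal sketch of reading this sidecar, assuming it is saved as "open_llama_3b-q5_1.meta.json" next to the quantized GGJT model file (both file names are hypothetical) and that the "hash" field is a SHA-256 digest of the model file, an assumption based on its 64-hex-character length rather than documented llm-rs behaviour.

import hashlib
import json

META_PATH = "open_llama_3b-q5_1.meta.json"   # hypothetical path to the JSON above
MODEL_PATH = "open_llama_3b-q5_1.bin"        # hypothetical path to the GGJT model file

# Load the metadata and print the fields describing the quantized model.
with open(META_PATH, "r", encoding="utf-8") as f:
    meta = json.load(f)

print(f"{meta['model']} | {meta['quantization']} ({meta['quantization_version']}) "
      f"| container: {meta['container']} | base: {meta['base_model']}")

# Recompute a SHA-256 digest of the model file in 1 MiB chunks and compare it
# with the recorded "hash" value (assumed, not confirmed, to be SHA-256).
digest = hashlib.sha256()
with open(MODEL_PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print("hash matches metadata:", digest.hexdigest() == meta["hash"])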