{
    "geitje-7b": {
        "compute_dtype": "auto",
        "dutch_coverage": "fine-tuned",
        "model_name": "Rijgersberg/GEITje-7B",
        "model_type": "fine-tuned",
        "num_parameters": 7241732096,
        "quantization": "8-bit"
    },
    "geitje-7b-chat": {
        "compute_dtype": "auto",
        "dutch_coverage": "fine-tuned",
        "model_name": "Rijgersberg/GEITje-7B-chat",
        "model_type": "instruction-tuned",
        "num_parameters": 7241732096,
        "quantization": "8-bit"
    },
    "gpt-neo-1.3b-dutch": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "pretrained",
        "model_name": "yhavinga/gpt-neo-1.3B-dutch",
        "model_type": "pretrained",
        "num_parameters": 1315575808,
        "quantization": "8-bit"
    },
    "gpt-neo-125m-dutch": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "pretrained",
        "model_name": "yhavinga/gpt-neo-125M-dutch",
        "model_type": "pretrained",
        "num_parameters": 125198592,
        "quantization": "8-bit"
    },
    "gpt2-large-dutch": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "pretrained",
        "model_name": "yhavinga/gpt2-large-dutch",
        "model_type": "pretrained",
        "num_parameters": 774030080,
        "quantization": "8-bit"
    },
    "gpt2-medium-dutch": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "pretrained",
        "model_name": "yhavinga/gpt2-medium-dutch",
        "model_type": "pretrained",
        "num_parameters": 354823168,
        "quantization": "8-bit"
    },
    "llama-2-13b-chat-dutch": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "fine-tuned",
        "model_name": "BramVanroy/Llama-2-13b-chat-dutch",
        "model_type": "instruction-tuned",
        "num_parameters": 13015864320,
        "quantization": "8-bit"
    },
    "llama-2-13b-chat-hf": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "meta-llama/Llama-2-13b-chat-hf",
        "model_type": "RL-tuned",
        "num_parameters": 13015864320,
        "quantization": "8-bit"
    },
    "llama-2-13b-hf": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "meta-llama/Llama-2-13b-hf",
        "model_type": "pretrained",
        "num_parameters": 13015864320,
        "quantization": "8-bit"
    },
    "llama-2-7b-chat-hf": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "meta-llama/Llama-2-7b-chat-hf",
        "model_type": "RL-tuned",
        "num_parameters": 6738415616,
        "quantization": "8-bit"
    },
    "llama-2-7b-hf": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "meta-llama/Llama-2-7b-hf",
        "model_type": "pretrained",
        "num_parameters": 6738415616,
        "quantization": "8-bit"
    },
    "llama2-13b-ft-mc4_nl_cleaned_tiny": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "fine-tuned",
        "model_name": "BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny",
        "model_type": "fine-tuned",
        "num_parameters": 13015864320,
        "quantization": "8-bit"
    },
    "mistral-7b-v0.1": {
        "compute_dtype": "auto",
        "dutch_coverage": "none",
        "model_name": "mistralai/Mistral-7B-v0.1",
        "model_type": "pretrained",
        "num_parameters": 7241732096,
        "quantization": "8-bit"
    },
    "mixtral-8x7b-v0.1": {
        "compute_dtype": "auto",
        "dutch_coverage": "none",
        "model_name": "mistralai/Mixtral-8x7B-v0.1",
        "model_type": "pretrained",
        "num_parameters": 46702792704,
        "quantization": "8-bit"
    },
    "neural-chat-7b-v3-1": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "Intel/neural-chat-7b-v3-1",
        "model_type": "RL-tuned",
        "num_parameters": 7241732096,
        "quantization": "8-bit"
    },
    "orca-2-13b": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "microsoft/Orca-2-13b",
        "model_type": "instruction-tuned",
        "num_parameters": 13015895040,
        "quantization": "8-bit"
    },
    "orca-2-7b": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "microsoft/Orca-2-7b",
        "model_type": "instruction-tuned",
        "num_parameters": 6738440192,
        "quantization": "8-bit"
    },
    "phi-2": {
        "compute_dtype": "auto",
        "dutch_coverage": "none",
        "model_name": "microsoft/phi-2",
        "model_type": "instruction-tuned",
        "num_parameters": 2779683840,
        "quantization": "8-bit"
    },
    "towerbase-7b-v0.1": {
        "compute_dtype": "auto",
        "dutch_coverage": "fine-tuned",
        "model_name": "Unbabel/TowerBase-7B-v0.1",
        "model_type": "fine-tuned",
        "num_parameters": 6738415616,
        "quantization": "8-bit"
    },
    "towerinstruct-7b-v0.1": {
        "compute_dtype": "auto",
        "dutch_coverage": "fine-tuned",
        "model_name": "Unbabel/TowerInstruct-7B-v0.1",
        "model_type": "instruction-tuned",
        "num_parameters": 6738472960,
        "quantization": "8-bit"
    },
    "yi-6b": {
        "compute_dtype": "auto",
        "dutch_coverage": "none",
        "model_name": "01-ai/Yi-6B",
        "model_type": "pretrained",
        "num_parameters": 6061035520,
        "quantization": "8-bit"
    },
    "yi-6b-chat": {
        "compute_dtype": "auto",
        "dutch_coverage": "none",
        "model_name": "01-ai/Yi-6B-Chat",
        "model_type": "instruction-tuned",
        "num_parameters": 6061035520,
        "quantization": "8-bit"
    },
    "zephyr-7b-beta": {
        "compute_dtype": "bfloat16",
        "dutch_coverage": "none",
        "model_name": "HuggingFaceH4/zephyr-7b-beta",
        "model_type": "RL-tuned",
        "num_parameters": 7241732096,
        "quantization": "8-bit"
    }
}