test_stage:
  quant_modifiers:
    # Quantize weights to int4 (channel-wise, symmetric) on all Linear layers,
    # leaving lm_head in full precision.
    vLLMQuantizationModifier:
      ignore: [lm_head]
      config_groups:
        group_0:
          weights: {num_bits: 4, type: int, symmetric: true, strategy: channel}
          targets: [Linear]
    # With sparsity 0.0 SparseGPT performs no pruning; quantize: true applies the
    # quantization config above in a one-shot, calibration-based (GPTQ-style) pass.
    SparseGPTModifier: {sparsity: 0.0, quantize: true, sequential_update: false}
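
# Usage sketch (not part of the recipe): a recipe like this can be applied with
# SparseML's one-shot entrypoint. The model name, dataset, output path, and
# sample counts below are illustrative placeholders, and argument names may
# differ across SparseML versions.
#
#   from sparseml.transformers import SparseAutoModelForCausalLM, oneshot
#
#   model = SparseAutoModelForCausalLM.from_pretrained(
#       "TinyLlama/TinyLlama-1.1B-Chat-v1.0", device_map="auto"
#   )
#   oneshot(
#       model=model,
#       dataset="open_platypus",        # calibration data for the SparseGPT pass
#       recipe="recipe.yaml",           # this file
#       max_seq_length=512,
#       num_calibration_samples=512,
#       output_dir="./tinyllama-w4a16",
#   )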