falcon-rw-1b-onnx-js-quantized / quantize_config.json
{
  "per_channel": false,
  "reduce_range": false,
  "per_model_config": {
    "model": {
      "op_types": [
        "Mul",
        "Slice",
        "ConstantOfShape",
        "Less",
        "Constant",
        "Reshape",
        "Softmax",
        "Expand",
        "ReduceMean",
        "Concat",
        "Transpose",
        "Erf",
        "Div",
        "Gather",
        "CumSum",
        "Equal",
        "Squeeze",
        "Where",
        "Pow",
        "Cast",
        "MatMul",
        "Shape",
        "Sqrt",
        "Sub",
        "Unsqueeze",
        "Range",
        "Add"
      ],
      "weight_type": "QInt8"
    }
  }
}
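
For reference, a minimal sketch of how settings like these are typically applied with ONNX Runtime's dynamic quantizer in Python. The model paths are hypothetical placeholders, and this mirrors the values above (per_channel false, reduce_range false, QInt8 weights) rather than reproducing the exact conversion script used for this repo.

# Minimal sketch, assuming ONNX Runtime's quantization API; paths are
# hypothetical and not taken from this repository.
from onnxruntime.quantization import QuantType, quantize_dynamic

quantize_dynamic(
    model_input="onnx/model.onnx",                # hypothetical input path
    model_output="onnx/model_quantized.onnx",     # hypothetical output path
    per_channel=False,            # matches "per_channel": false
    reduce_range=False,           # matches "reduce_range": false
    weight_type=QuantType.QInt8,  # matches "weight_type": "QInt8"
)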