{ "per_channel": false, "reduce_range": false, "per_model_config": { "decoder_model": { "op_types": [ "Constant", "Slice", "Squeeze", "Gather", "Where", "Pow", "Gemm", "Split", "Div", "Add", "ConstantOfShape", "Shape", "Concat", "Sqrt", "Transpose", "Mul", "MatMul", "Tanh", "ReduceMean", "Softmax", "Sub", "Cast", "Reshape", "Range", "Unsqueeze" ], "weight_type": "QInt8" }, "decoder_model_merged": { "op_types": [ "Constant", "Slice", "Squeeze", "Gather", "Where", "Pow", "Gemm", "Add", "Div", "Split", "ConstantOfShape", "Shape", "Concat", "Sqrt", "If", "Transpose", "Mul", "MatMul", "Tanh", "ReduceMean", "Softmax", "Sub", "Cast", "Reshape", "Range", "Unsqueeze" ], "weight_type": "QInt8" }, "decoder_with_past_model": { "op_types": [ "Constant", "Slice", "Squeeze", "Gather", "Where", "Pow", "Gemm", "Split", "Div", "Add", "ConstantOfShape", "Shape", "Concat", "Sqrt", "Transpose", "Mul", "MatMul", "Tanh", "ReduceMean", "Softmax", "Sub", "Cast", "Reshape", "Range", "Unsqueeze" ], "weight_type": "QInt8" } } }