{
  "per_channel": false,
  "reduce_range": false,
  "per_model_config": {
    "decoder_with_past_model": {
      "op_types": [
        "Gemm",
        "Cast",
        "Reshape",
        "Gather",
        "Split",
        "ConstantOfShape",
        "Where",
        "Mul",
        "Sqrt",
        "Unsqueeze",
        "Squeeze",
        "Constant",
        "Range",
        "Concat",
        "Softmax",
        "Transpose",
        "Div",
        "Sub",
        "Add",
        "ReduceMean",
        "Shape",
        "Slice",
        "Tanh",
        "Pow",
        "MatMul"
      ],
      "weight_type": "QInt8"
    },
    "decoder_model": {
      "op_types": [
        "Gemm",
        "Cast",
        "Reshape",
        "Gather",
        "Split",
        "ConstantOfShape",
        "Where",
        "Mul",
        "Sqrt",
        "Unsqueeze",
        "Squeeze",
        "Constant",
        "Range",
        "Concat",
        "Softmax",
        "Transpose",
        "Div",
        "Sub",
        "Add",
        "ReduceMean",
        "Shape",
        "Slice",
        "Tanh",
        "Pow",
        "MatMul"
      ],
      "weight_type": "QInt8"
    },
    "decoder_model_merged": {
      "op_types": [
        "Gemm",
        "Cast",
        "Reshape",
        "Gather",
        "Split",
        "ConstantOfShape",
        "Where",
        "Mul",
        "Sqrt",
        "Unsqueeze",
        "Squeeze",
        "Constant",
        "Range",
        "Concat",
        "Softmax",
        "Transpose",
        "Div",
        "Sub",
        "Add",
        "ReduceMean",
        "If",
        "Shape",
        "Slice",
        "Tanh",
        "Pow",
        "MatMul"
      ],
      "weight_type": "QInt8"
    }
  }
}