quantize_config.json
{
  "per_channel": false,
  "reduce_range": false,
  "per_model_config": {
    "encoder_model": {
      "op_types": [
        "Mul",
        "Pow",
        "Add",
        "Div",
        "Softmax",
        "Conv",
        "Erf",
        "Sub",
        "Transpose",
        "Reshape",
        "Gather",
        "ReduceMean",
        "Sqrt",
        "Constant",
        "MatMul",
        "Concat",
        "Unsqueeze",
        "Shape"
      ],
      "weight_type": "QUInt8"
    },
    "decoder_model": {
      "op_types": [
        "Pow",
        "Expand",
        "Transpose",
        "Less",
        "Softmax",
        "Range",
        "ConstantOfShape",
        "Mul",
        "Add",
        "Sub",
        "Where",
        "Gather",
        "Squeeze",
        "MatMul",
        "Erf",
        "Cast",
        "Reshape",
        "Concat",
        "Unsqueeze",
        "Div",
        "Slice",
        "Equal",
        "ReduceMean",
        "Sqrt",
        "Constant",
        "Shape"
      ],
      "weight_type": "QInt8"
    },
    "decoder_with_past_model": {
      "op_types": [
        "Mul",
        "Pow",
        "Add",
        "Div",
        "Softmax",
        "Slice",
        "Sub",
        "Erf",
        "Transpose",
        "Reshape",
        "Gather",
        "ReduceMean",
        "Sqrt",
        "Concat",
        "Constant",
        "MatMul",
        "Unsqueeze",
        "Shape"
      ],
      "weight_type": "QInt8"
    },
    "decoder_model_merged": {
      "op_types": [
        "Pow",
        "Expand",
        "Transpose",
        "Less",
        "Softmax",
        "Range",
        "ConstantOfShape",
        "Mul",
        "Add",
        "Sub",
        "Where",
        "Gather",
        "Squeeze",
        "MatMul",
        "Erf",
        "Cast",
        "Reshape",
        "Concat",
        "Unsqueeze",
        "Div",
        "If",
        "Slice",
        "Equal",
        "ReduceMean",
        "Sqrt",
        "Constant",
        "Shape"
      ],
      "weight_type": "QInt8"
    }
  }
}
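
The config groups dynamic-quantization settings per exported ONNX sub-model: the encoder uses unsigned QUInt8 weights, the decoder variants use signed QInt8, and each entry lists the operator types eligible for quantization, alongside the global per_channel and reduce_range flags. As a rough illustration of how a file shaped like this is typically consumed (the paths, loop, and script below are assumptions for this sketch, not the repository's own tooling), a minimal example using onnxruntime's quantize_dynamic might look like:

    import json
    from pathlib import Path

    from onnxruntime.quantization import QuantType, quantize_dynamic

    # Load the per-model quantization settings from this config file.
    config = json.loads(Path("quantize_config.json").read_text())

    for model_name, model_config in config["per_model_config"].items():
        quantize_dynamic(
            model_input=f"onnx/{model_name}.onnx",             # assumed input location
            model_output=f"onnx/{model_name}_quantized.onnx",  # assumed output location
            per_channel=config["per_channel"],
            reduce_range=config["reduce_range"],
            # Map "QInt8"/"QUInt8" strings onto the QuantType enum members.
            weight_type=QuantType[model_config["weight_type"]],
            op_types_to_quantize=model_config["op_types"],
        )

Restricting op_types_to_quantize to the listed operators keeps quantization focused on the ops that actually appear in each graph, while the differing weight_type values mean the encoder and decoder models are quantized with different integer signedness.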