6
6
``` python
7
7
import onnx
8
8
9
+ # onnx_model is an in-memory ModelProto
9
10
onnx_model = onnx.load('path/to/the/model.onnx')
10
- # `onnx_model` is a ModelProto struct
11
11
```
12
12
Runnable IPython notebooks:
13
- - [ load_model.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/load_model.ipynb)
13
+ - [load_model.ipynb](/onnx/examples/load_model.ipynb)
14
14
15
15
## Loading an ONNX Model with External Data
16
16
@@ -37,7 +37,8 @@ load_external_data_for_model(onnx_model, 'data/directory/path/')
37
37
``` python
38
38
from onnx.external_data_helper import convert_model_to_external_data
39
39
40
- onnx_model = ... # Your model in memory as ModelProto
40
+ # onnx_model is an in-memory ModelProto
41
+ onnx_model = ...
41
42
convert_model_to_external_data(onnx_model, all_tensors_to_one_file=True, location='filename', size_threshold=1024, convert_attribute=False)
42
43
# Then the onnx_model has converted raw data as external data
43
44
# Must be followed by save
@@ -47,20 +48,22 @@ convert_model_to_external_data(onnx_model, all_tensors_to_one_file=True, locatio
47
48
``` python
48
49
import onnx
49
50
50
- onnx_model = ... # Your model in memory as ModelProto
51
+ # onnx_model is an in-memory ModelProto
52
+ onnx_model = ...
51
53
52
54
# Save the ONNX model
53
55
onnx.save(onnx_model, 'path/to/the/model.onnx')
54
56
```
55
57
Runnable IPython notebooks:
56
- - [ save_model.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/save_model.ipynb)
58
+ - [save_model.ipynb](/onnx/examples/save_model.ipynb)
57
59
58
60
59
61
## Converting and Saving an ONNX Model to External Data
60
62
``` python
61
63
import onnx
62
64
63
- onnx_model = ... # Your model in memory as ModelProto
65
+ # onnx_model is an in-memory ModelProto
66
+ onnx_model = ...
64
67
onnx.save_model(onnx_model, 'path/to/save/the/model.onnx', save_as_external_data=True, all_tensors_to_one_file=True, location='filename', size_threshold=1024, convert_attribute=False)
65
68
# Then the onnx_model has converted raw data as external data and saved to specific directory
66
69
```
@@ -95,7 +98,7 @@ with open('tensor.pb', 'rb') as f:
95
98
print('After saving and loading, new TensorProto:\n{}'.format(new_tensor))
96
99
```
97
100
Runnable IPython notebooks:
98
- - [ np_array_tensorproto.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/np_array_tensorproto.ipynb)
101
+ - [np_array_tensorproto.ipynb](/onnx/examples/np_array_tensorproto.ipynb)
99
102
100
103
## Creating an ONNX Model Using Helper Functions
101
104
``` python
@@ -120,18 +123,18 @@ Y = helper.make_tensor_value_info('Y', TensorProto.FLOAT, [3, 4])
120
123
121
124
# Create a node (NodeProto) - This is based on Pad-11
122
125
node_def = helper.make_node(
-     'Pad',                    # node name
+     'Pad',                    # name
      ['X', 'pads', 'value'],   # inputs
-     ['Y'],                    # outputs
-     mode='constant',          # attributes
+     ['Y'],                    # outputs
+     mode='constant',          # attributes
)
128
131
129
132
# Create the graph (GraphProto)
130
133
graph_def = helper.make_graph(
-     [node_def],
-     'test-model',
-     [X, pads, value],
-     [Y],
+     [node_def],         # nodes
+     'test-model',       # name
+     [X, pads, value],   # inputs
+     [Y],                # outputs
)
136
139
137
140
# Create the model (ModelProto)
@@ -142,8 +145,8 @@ onnx.checker.check_model(model_def)
142
145
print('The model is checked!')
143
146
```
144
147
Runnable IPython notebooks:
145
- - [ make_model.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/make_model.ipynb)
146
- - [ Protobufs.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/Protobufs.ipynb)
148
+ - [make_model.ipynb](/onnx/examples/make_model.ipynb)
149
+ - [Protobufs.ipynb](/onnx/examples/Protobufs.ipynb)
147
150
148
151
## Checking an ONNX Model
149
152
``` python
@@ -156,11 +159,15 @@ onnx_model = onnx.load(model_path)
156
159
print('The model is:\n{}'.format(onnx_model))
157
160
158
161
# Check the model
159
- onnx.checker.check_model(onnx_model)
160
- print (' The model is checked!' )
162
+ try:
+     onnx.checker.check_model(onnx_model)
+ except onnx.checker.ValidationError as e:
+     print('The model is invalid: %s' % e)
+ else:
+     print('The model is valid!')
161
168
```
162
169
Runnable IPython notebooks:
163
- - [ check_model.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/check_model.ipynb)
170
+ - [check_model.ipynb](/onnx/examples/check_model.ipynb)
164
171
165
172
### Checking a Large ONNX Model >2GB
166
173
Current checker supports checking models with external data, but for those models larger than 2GB, please use the model path for onnx.checker and the external data needs to be under the same directory.
@@ -204,7 +211,7 @@ onnx.checker.check_model(inferred_model)
204
211
print('After shape inference, the shape info of Y is:\n{}'.format(inferred_model.graph.value_info))
205
212
```
206
213
Runnable IPython notebooks:
207
- - [ shape_inference.ipynb] ( https://github.com/onnx/onnx/tree/master /onnx/examples/shape_inference.ipynb)
214
+ - [shape_inference.ipynb](/onnx/examples/shape_inference.ipynb)
208
215
209
216
### Shape inference a Large ONNX Model >2GB
210
217
Current shape_inference supports models with external data, but for those models larger than 2GB, please use the model path for onnx.shape_inference.infer_shapes_path and the external data needs to be under the same directory. You can specify the output path for saving the inferred model; otherwise, the default output path is same as the original model path.
0 commit comments