@@ -51,20 +51,20 @@ def upload_onnx_model(model_name, zoo_dir, backup=False, only_local=False):
51
51
model_dir = os .path .join (zoo_dir , model_name )
52
52
suffix = '-backup' if backup else ''
53
53
if backup :
54
- print('Backing up the previous version of ONNX model {}...'.format(model_name))
55
- rel_file_name = '{}{}.tar.gz'.format(model_name, suffix)
54
+ print(f'Backing up the previous version of ONNX model {model_name}...')
55
+ rel_file_name = f'{model_name}{suffix}.tar.gz'
56
56
abs_file_name = os .path .join (zoo_dir , rel_file_name )
57
- print('Compressing {} model to {}'.format(model_name, abs_file_name))
57
+ print(f'Compressing {model_name} model to {abs_file_name}')
58
58
with tarfile .open (abs_file_name , 'w:gz' ) as f :
59
59
f .add (model_dir , arcname = model_name )
60
60
file_size = os .stat (abs_file_name ).st_size
61
- print('Uploading {} ({} MB) to s3 cloud...'.format(abs_file_name, float(file_size) / 1024 / 1024))
61
+ print(f'Uploading {abs_file_name} ({float(file_size) / 1024 / 1024} MB) to s3 cloud...')
62
62
client = boto3 .client ('s3' , 'us-east-1' )
63
63
transfer = boto3 .s3 .transfer .S3Transfer (client )
64
- transfer.upload_file(abs_file_name, 'download.onnx', 'models/latest/{}'.format(rel_file_name),
64
+ transfer.upload_file(abs_file_name, 'download.onnx', f'models/latest/{rel_file_name}',
65
65
extra_args = {'ACL' : 'public-read' })
66
66
67
- print('Successfully uploaded {} to s3!'.format(rel_file_name))
67
+ print(f'Successfully uploaded {rel_file_name} to s3!')
68
68
69
69
70
70
def download_onnx_model (model_name , zoo_dir , use_cache = True , only_local = False ):
@@ -75,7 +75,7 @@ def download_onnx_model(model_name, zoo_dir, use_cache=True, only_local=False):
75
75
return
76
76
else :
77
77
shutil .rmtree (model_dir )
78
- url = 'https://s3.amazonaws.com/download.onnx/models/latest/{}.tar.gz'.format(model_name)
78
+ url = f'https://s3.amazonaws.com/download.onnx/models/latest/{model_name}.tar.gz'
79
79
80
80
download_file = tempfile .NamedTemporaryFile (delete = False )
81
81
try :
@@ -84,10 +84,10 @@ def download_onnx_model(model_name, zoo_dir, use_cache=True, only_local=False):
84
84
model_name , url , download_file .name ))
85
85
urlretrieve (url , download_file .name )
86
86
with tarfile .open (download_file .name ) as t :
87
- print('Extracting ONNX model {} to {} ...\n'.format(model_name, zoo_dir))
87
+ print(f'Extracting ONNX model {model_name} to {zoo_dir} ...\n')
88
88
t .extractall (zoo_dir )
89
89
except Exception as e :
90
- print('Failed to download/backup data for ONNX model {}: {}'.format(model_name, e))
90
+ print(f'Failed to download/backup data for ONNX model {model_name}: {e}')
91
91
if not os .path .exists (model_dir ):
92
92
os .makedirs (model_dir )
93
93
finally :
@@ -119,7 +119,7 @@ def download_caffe2_model(model_name, zoo_dir, use_cache=True):
119
119
# (Sep 17, 2017)
120
120
downloadFromURLToFile (url , dest )
121
121
except Exception as e :
122
- print ("Abort: {reason}" . format ( reason = e ) )
122
+ print (f "Abort: { e } " )
123
123
print ("Cleaning up..." )
124
124
deleteDirectory (model_dir )
125
125
raise
@@ -131,14 +131,14 @@ def caffe2_to_onnx(caffe2_model_name, caffe2_model_dir):
131
131
132
132
with open (os .path .join (caffe2_model_dir , 'init_net.pb' ), 'rb' ) as f :
133
133
caffe2_init_proto .ParseFromString (f .read ())
134
- caffe2_init_proto.name = '{}_init'.format(caffe2_model_name)
134
+ caffe2_init_proto.name = f'{caffe2_model_name}_init'
135
135
with open (os .path .join (caffe2_model_dir , 'predict_net.pb' ), 'rb' ) as f :
136
136
caffe2_predict_proto .ParseFromString (f .read ())
137
137
caffe2_predict_proto .name = caffe2_model_name
138
138
with open (os .path .join (caffe2_model_dir , 'value_info.json' ), 'rb' ) as f :
139
139
value_info = json .loads (f .read ())
140
140
141
- print('Converting Caffe2 model {} in {} to ONNX format'.format(caffe2_model_name, caffe2_model_dir))
141
+ print(f'Converting Caffe2 model {caffe2_model_name} in {caffe2_model_dir} to ONNX format')
142
142
onnx_model = caffe2 .python .onnx .frontend .caffe2_net_to_onnx_model (
143
143
init_net = caffe2_init_proto ,
144
144
predict_net = caffe2_predict_proto ,
@@ -245,7 +245,7 @@ def onnx_verify(onnx_model, inputs, ref_outputs):
245
245
for onnx_model_name in model_mapping :
246
246
c2_model_name = model_mapping [onnx_model_name ]
247
247
248
- print('####### Processing ONNX model {} ({} in Caffe2) #######'.format(onnx_model_name, c2_model_name))
248
+ print(f'####### Processing ONNX model {onnx_model_name} ({c2_model_name} in Caffe2) #######')
249
249
download_caffe2_model (c2_model_name , caffe2_zoo_dir , use_cache = use_cache )
250
250
download_onnx_model (onnx_model_name , onnx_zoo_dir , use_cache = use_cache , only_local = only_local )
251
251
@@ -261,19 +261,19 @@ def onnx_verify(onnx_model, inputs, ref_outputs):
261
261
262
262
onnx_model , c2_init_net , c2_predict_net = caffe2_to_onnx (c2_model_name , os .path .join (caffe2_zoo_dir , c2_model_name ))
263
263
264
- print('Deleteing old ONNX {} model...'.format(onnx_model_name))
264
+ print(f'Deleting old ONNX {onnx_model_name} model...')
265
265
for f in glob .glob (os .path .join (onnx_model_dir , 'model*' .format (onnx_model_name ))):
266
266
os .remove (f )
267
267
268
- print('Serializing generated ONNX {} model ...'.format(onnx_model_name))
268
+ print(f'Serializing generated ONNX {onnx_model_name} model ...')
269
269
with open (os .path .join (onnx_model_dir , 'model.onnx' ), 'wb' ) as file :
270
270
file .write (onnx_model .SerializeToString ())
271
271
272
- print('Verifying model {} with ONNX model checker...'.format(onnx_model_name))
272
+ print(f'Verifying model {onnx_model_name} with ONNX model checker...')
273
273
onnx .checker .check_model (onnx_model )
274
274
275
275
total_existing_data_set = 0
276
- print('Verifying model {} with existing test data...'.format(onnx_model_name))
276
+ print(f'Verifying model {onnx_model_name} with existing test data...')
277
277
for f in glob .glob (os .path .join (onnx_model_dir , '*.npz' )):
278
278
test_data = np .load (f , encoding = 'bytes' )
279
279
inputs = list (test_data ['inputs' ])
@@ -285,41 +285,41 @@ def onnx_verify(onnx_model, inputs, ref_outputs):
285
285
inputs_num = len (glob .glob (os .path .join (f , 'input_*.pb' )))
286
286
for i in range (inputs_num ):
287
287
tensor = onnx .TensorProto ()
288
- with open(os.path.join(f, 'input_{}.pb'.format(i)), 'rb') as pf:
288
+ with open(os.path.join(f, f'input_{i}.pb'), 'rb') as pf:
289
289
tensor .ParseFromString (pf .read ())
290
290
inputs .append (numpy_helper .to_array (tensor ))
291
291
ref_outputs = []
292
292
ref_outputs_num = len (glob .glob (os .path .join (f , 'output_*.pb' )))
293
293
for i in range (ref_outputs_num ):
294
294
tensor = onnx .TensorProto ()
295
- with open(os.path.join(f, 'output_{}.pb'.format(i)), 'rb') as pf:
295
+ with open(os.path.join(f, f'output_{i}.pb'), 'rb') as pf:
296
296
tensor .ParseFromString (pf .read ())
297
297
ref_outputs .append (numpy_helper .to_array (tensor ))
298
298
onnx_verify (onnx_model , inputs , ref_outputs )
299
299
total_existing_data_set += 1
300
300
301
301
starting_index = 0
302
- while os.path.exists(os.path.join(onnx_model_dir, 'test_data_set_{}'.format(starting_index))):
302
+ while os.path.exists(os.path.join(onnx_model_dir, f'test_data_set_{starting_index}')):
303
303
starting_index += 1
304
304
305
305
if total_existing_data_set == 0 and add_test_data == 0 :
306
306
add_test_data = 3
307
307
total_existing_data_set = 3
308
308
309
- print('Generating {} sets of new test data...'.format(add_test_data))
309
+ print(f'Generating {add_test_data} sets of new test data...')
310
310
for i in range (starting_index , add_test_data + starting_index ):
311
- data_dir = os.path.join(onnx_model_dir, 'test_data_set_{}'.format(i))
311
+ data_dir = os.path.join(onnx_model_dir, f'test_data_set_{i}')
312
312
os .makedirs (data_dir )
313
313
inputs = generate_test_input_data (onnx_model , 255 )
314
314
ref_outputs = generate_test_output_data (c2_init_net , c2_predict_net , inputs )
315
315
onnx_verify (onnx_model , inputs , ref_outputs )
316
316
for index , input in enumerate (inputs ):
317
317
tensor = numpy_helper .from_array (input [1 ])
318
- with open(os.path.join(data_dir, 'input_{}.pb'.format(index)), 'wb') as file:
318
+ with open(os.path.join(data_dir, f'input_{index}.pb'), 'wb') as file:
319
319
file .write (tensor .SerializeToString ())
320
320
for index , output in enumerate (ref_outputs ):
321
321
tensor = numpy_helper .from_array (output )
322
- with open(os.path.join(data_dir, 'output_{}.pb'.format(index)), 'wb') as file:
322
+ with open(os.path.join(data_dir, f'output_{index}.pb'), 'wb') as file:
323
323
file .write (tensor .SerializeToString ())
324
324
325
325
del onnx_model
0 commit comments