@@ -69,14 +69,11 @@ BasicBackend::BasicBackend(std::unique_ptr<ONNX_NAMESPACE::ModelProto>& model_pr
                                               subgraph_context_.subgraph_name);
     model_stream.reset();  // Delete stream after it is no longer needed
   } else {
-    std::shared_ptr<const OVNetwork> ov_model;
-    {
-      const std::string model = model_proto->SerializeAsString();
-      if (!subgraph_context.has_dynamic_input_shape) {
-        delete model_proto.release();
-      }
-      ov_model = CreateOVModel(model, session_context_, const_outputs_map_);
+    std::string model = model_proto->SerializeAsString();
+    if (!subgraph_context.has_dynamic_input_shape) {
+      model_proto.reset();
     }
+    auto ov_model = CreateOVModel(std::move(model), session_context_, const_outputs_map_);
     LOGS_DEFAULT(INFO) << log_tag << "IO Buffering Enabled";
     exe_network_ = OVCore::Get()->CompileModel(
         ov_model, remote_context_, subgraph_context_.subgraph_name);
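Review note, not part of the diff: the change above replaces the manual `delete model_proto.release()` with `model_proto.reset()` on the `std::unique_ptr` that owns the ONNX proto. A minimal sketch of the difference is below, using a hypothetical `ModelProto` stand-in rather than the real `ONNX_NAMESPACE::ModelProto`:

```cpp
#include <memory>

struct ModelProto {};  // hypothetical stand-in for ONNX_NAMESPACE::ModelProto

int main() {
  auto model_proto = std::make_unique<ModelProto>();

  // Old pattern (removed lines): give up ownership, then delete by hand.
  // delete model_proto.release();

  // New pattern (added lines): destroy the owned object and null the pointer in one call.
  model_proto.reset();

  return model_proto == nullptr ? 0 : 1;  // after reset() the pointer is guaranteed null
}
```

Both forms free the proto once it has been serialized, but `reset()` is the idiomatic single call and leaves the pointer in a well-defined null state, so the early memory release for static-shape subgraphs reads the same way in both hunks.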
@@ -108,14 +105,11 @@ BasicBackend::BasicBackend(std::unique_ptr<ONNX_NAMESPACE::ModelProto>& model_pr
                                               subgraph_context_.subgraph_name);
   } else {  // For all other types use ov::ov_core read_model() to generate OV IR
              // followed by ov::ov_core compile_model()
-    std::shared_ptr<const OVNetwork> ov_model;
-    {
-      const std::string model = model_proto->SerializeAsString();
-      if (!subgraph_context.has_dynamic_input_shape) {
-        delete model_proto.release();
-      }
-      ov_model = CreateOVModel(std::move(model), session_context_, const_outputs_map_);
+    std::string model = model_proto->SerializeAsString();
+    if (!subgraph_context.has_dynamic_input_shape) {
+      model_proto.reset();
     }
+    auto ov_model = CreateOVModel(std::move(model), session_context_, const_outputs_map_);
     exe_network_ = OVCore::Get()->CompileModel(
         ov_model, hw_target, device_config, subgraph_context_.subgraph_name);
   }
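Review note, not part of the diff: both hunks now build the serialized model as a plain `std::string` and hand it to `CreateOVModel(std::move(model), ...)`, so the potentially large proto bytes are moved rather than copied. A minimal sketch of that pass-by-value-and-move pattern, with a hypothetical `CreateOVModelSketch` standing in for the real helper:

```cpp
#include <string>
#include <utility>

// Hypothetical stand-in for CreateOVModel(); only the parameter-passing style matters:
// taking std::string by value lets the caller move the serialized bytes into it.
std::string CreateOVModelSketch(std::string model) {
  return model;  // the callee owns the bytes and can pass them on without another copy
}

int main() {
  // Stands in for model_proto->SerializeAsString() in the diff.
  std::string model = "serialized-model-bytes";
  auto ov_model = CreateOVModelSketch(std::move(model));
  // After the move, `model` is valid but unspecified and must not be reused,
  // which is why the diff serializes first, optionally resets model_proto,
  // and only then moves the string into CreateOVModel().
  return ov_model.empty() ? 1 : 0;
}
```

The ordering in the diff follows from this: serialize while `model_proto` is still alive, release the proto for static-shape subgraphs, then move the string exactly once into `CreateOVModel()`.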