Commit 42cb92e

[RTTI] Replace std::dynamic_(pointer)?_casts with ov::as_type_(ptr)? - FEs (openvinotoolkit#28397)
### Details:
- Replaced `std::dynamic_cast` and `std::dynamic_pointer_cast` with `ov::as_type` and `ov::as_type_ptr`, respectively, in the src/frontends and src/tests directories, where applicable.

### Tickets:
- CVS-160241

Signed-off-by: Tomasz Jankowski <tomasz1.jankowski@intel.com>
1 parent a4ee2df commit 42cb92e
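
To illustrate the pattern applied throughout this commit, here is a minimal sketch (hypothetical code, not taken from the changed files; `use_relu` and the `Relu` op are stand-ins): `ov::as_type_ptr` performs the same downcast as `std::dynamic_pointer_cast`, but through OpenVINO's own type information instead of C++ RTTI, returning `nullptr` on a type mismatch.

```cpp
#include <memory>

#include "openvino/core/type.hpp"
#include "openvino/op/relu.hpp"

void use_relu(const std::shared_ptr<ov::Node>& node) {
    // Before: C++ RTTI-based downcast.
    if (auto relu = std::dynamic_pointer_cast<ov::op::v0::Relu>(node)) {
        // ... work with relu
    }
    // After: downcast via OpenVINO type info; nullptr if node is not a Relu.
    if (auto relu = ov::as_type_ptr<ov::op::v0::Relu>(node)) {
        // ... work with relu
    }
}
```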

80 files changed: +220 −242 lines changed


src/core/include/openvino/core/type.hpp

+1 −1

@@ -85,7 +85,7 @@ typename std::enable_if<
                         bool>::value,
     bool>::type
 is_type(Value value) {
-    return value->get_type_info().is_castable(Type::get_type_info_static());
+    return value && value->get_type_info().is_castable(Type::get_type_info_static());
 }

 /// Casts a Value* to a Type* if it is of type Type, nullptr otherwise
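
The added `value &&` guard means `ov::is_type` (and the `ov::as_type` / `ov::as_type_ptr` helpers built on it) now reports a non-match for a null input instead of dereferencing it. A small sketch of the resulting behavior (hypothetical usage, not part of the diff):

```cpp
#include <memory>

#include "openvino/core/type.hpp"
#include "openvino/op/parameter.hpp"

bool is_parameter(const std::shared_ptr<ov::Node>& node) {
    // With the guard above, a null `node` simply yields false here
    // rather than a null-pointer dereference inside is_type.
    return ov::is_type<ov::op::v0::Parameter>(node);
}
```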

src/frontends/ir/src/ir_deserializer.cpp

+7 −7

@@ -533,18 +533,18 @@ std::shared_ptr<ov::Model> ov::XmlDeserializer::parse_function(const pugi::xml_n
         auto node = create_node(inputs, p.xml, weights, p.params);
         id_to_node[layer_id] = node;

-        if (const auto& parameter_node = std::dynamic_pointer_cast<ov::op::v0::Parameter>(node)) {
+        if (const auto& parameter_node = ov::as_type_ptr<ov::op::v0::Parameter>(node)) {
             io_map.inputs.insert({layer_id, func_nodes.parameters.size()});
             func_nodes.parameters.emplace_back(parameter_node);
         }

-        if (const auto& result_node = std::dynamic_pointer_cast<ov::op::v0::Result>(node)) {
+        if (const auto& result_node = ov::as_type_ptr<ov::op::v0::Result>(node)) {
             io_map.outputs.insert({layer_id, func_nodes.results.size()});
             func_nodes.results.emplace_back(result_node);
         }

-        if (const auto& sink = std::dynamic_pointer_cast<ov::op::Sink>(node)) {
-            auto subgraph_op = std::dynamic_pointer_cast<ov::op::util::MultiSubGraphOp>(node);
+        if (const auto& sink = ov::as_type_ptr<ov::op::Sink>(node)) {
+            auto subgraph_op = ov::as_type_ptr<ov::op::util::MultiSubGraphOp>(node);
             if (subgraph_op) {
                 for (const auto& body_model : subgraph_op->get_functions()) {
                     if (body_model->get_sinks().size()) {
@@ -557,7 +557,7 @@ std::shared_ptr<ov::Model> ov::XmlDeserializer::parse_function(const pugi::xml_n
             }
         }

-        if (const auto& read_value = std::dynamic_pointer_cast<ov::op::util::ReadValueBase>(node)) {
+        if (const auto& read_value = ov::as_type_ptr<ov::op::util::ReadValueBase>(node)) {
             variable_id_to_read_value[read_value->get_variable_id()] = read_value;
         }

@@ -569,7 +569,7 @@ std::shared_ptr<ov::Model> ov::XmlDeserializer::parse_function(const pugi::xml_n
                                                 func_nodes.parameters,
                                                 pugixml::get_str_attr(root, "name", ""));
     for (const auto& sink : func_nodes.sinks) {
-        if (const auto& assign = std::dynamic_pointer_cast<ov::op::util::AssignBase>(sink)) {
+        if (const auto& assign = ov::as_type_ptr<ov::op::util::AssignBase>(sink)) {
             assign->add_control_dependency(variable_id_to_read_value.at(assign->get_variable_id()));
         }
     }
@@ -902,7 +902,7 @@ std::shared_ptr<ov::Node> ov::XmlDeserializer::create_node(const std::vector<ov:
         OPENVINO_THROW("Opset ", params.version, " doesn't contain the operation with type: ", type_name);
     }
     // Share Weights form constant blob
-    if (auto constant = std::dynamic_pointer_cast<ov::op::v0::Constant>(ovNode)) {
+    if (auto constant = ov::as_type_ptr<ov::op::v0::Constant>(ovNode)) {
         constant->alloc_buffer_on_visit_attributes(false);
     }
     ovNode->set_arguments(inputs);

src/frontends/jax/src/node_context.cpp

+1 −1

@@ -197,7 +197,7 @@ Any NodeContext::get_values_from_const_input(int index) const {
                             index,
                             " does not exist.");
    auto input_val = get_input(index);
-   if (auto input = std::dynamic_pointer_cast<JaxFrameworkNode>(input_val.get_node_shared_ptr())) {
+   if (auto input = ov::as_type_ptr<JaxFrameworkNode>(input_val.get_node_shared_ptr())) {
        const auto& attrs = input->get_attrs();
        if (attrs.find("none_value") != attrs.end()) {
            return {};

src/frontends/onnx/frontend/src/core/null_node.cpp

+1 −1

@@ -19,7 +19,7 @@ std::shared_ptr<ov::Node> NullNode::clone_with_new_inputs(const ov::OutputVector
 }  // namespace ov

 bool ov::op::util::is_null(const ov::Node* node) {
-    return dynamic_cast<const ov::frontend::onnx::NullNode*>(node) != nullptr;
+    return ov::as_type<const ov::frontend::onnx::NullNode>(node) != nullptr;
 }

 bool ov::op::util::is_null(const std::shared_ptr<ov::Node>& node) {

src/frontends/onnx/frontend/src/op/com.microsoft/matmulnbits.cpp

+2 −2

@@ -53,7 +53,7 @@ ov::OutputVector matmulnbits(const ov::frontend::onnx::Node& node) {
     CHECK_VALID_NODE(node, blob_size > 0, "Wrong blob size: ", blob_size);
     // in documentation: ...Input B is a 2D constant Matrix.
     CHECK_VALID_NODE(node,
-                     dynamic_cast<v0::Constant*>(b_quantized.get_node()) != nullptr,
+                     ov::as_type<v0::Constant>(b_quantized.get_node()) != nullptr,
                      "MatMulNBits limitation: accepting only a constant as a B input");
     CHECK_VALID_NODE(node,
                      b_quantized.get_partial_shape().rank() == 3,
@@ -112,7 +112,7 @@ ov::OutputVector matmulnbits(const ov::frontend::onnx::Node& node) {
     }

     {
-        const auto b_const = std::dynamic_pointer_cast<v0::Constant>(b_quantized.get_node_shared_ptr());
+        const auto b_const = ov::as_type_ptr<v0::Constant>(b_quantized.get_node_shared_ptr());

         ov::Output<ov::Node> casted_b;
         ov::Shape casted_b_shape;

src/frontends/onnx/frontend/src/utils/common.cpp

+4 −4

@@ -221,7 +221,7 @@ bool collect_translation_exceptions(const std::shared_ptr<ov::Model>& partially_
     };

     for (const auto& node : partially_converted->get_ordered_ops()) {
-        if (const auto& fw_node = std::dynamic_pointer_cast<ov::frontend::onnx::ONNXFrameworkNode>(node)) {
+        if (const auto& fw_node = ov::as_type_ptr<ov::frontend::onnx::ONNXFrameworkNode>(node)) {
            const auto& attrs = fw_node->get_attrs();
            auto node_name = attrs.get_opset_name() + "." + attrs.get_type_name();
            if (unsupported_operations->count(node_name) > 0) {
@@ -230,7 +230,7 @@ bool collect_translation_exceptions(const std::shared_ptr<ov::Model>& partially_

            print_unsupported(fw_node);
            unsupported_operations->insert(node_name);
-        } else if (const auto& fw_node = std::dynamic_pointer_cast<ov::frontend::onnx::NotSupportedONNXNode>(node)) {
+        } else if (const auto& fw_node = ov::as_type_ptr<ov::frontend::onnx::NotSupportedONNXNode>(node)) {
            const auto& attrs = fw_node->get_attrs();

            if (fw_node->additional_error_message().empty()) {
@@ -248,7 +248,7 @@ bool collect_translation_exceptions(const std::shared_ptr<ov::Model>& partially_
                failures->insert(node_fail);
            }

-        } else if (const auto& if_node = std::dynamic_pointer_cast<ov::op::v8::If>(node)) {
+        } else if (const auto& if_node = ov::as_type_ptr<ov::op::v8::If>(node)) {
            collect_translation_exceptions(if_node->get_then_body(),
                                           telemetry,
                                           output_stream,
@@ -259,7 +259,7 @@ bool collect_translation_exceptions(const std::shared_ptr<ov::Model>& partially_
                                           output_stream,
                                           unsupported_operations,
                                           failures);
-        } else if (const auto& loop_node = std::dynamic_pointer_cast<ov::op::v5::Loop>(node)) {
+        } else if (const auto& loop_node = ov::as_type_ptr<ov::op::v5::Loop>(node)) {
            collect_translation_exceptions(loop_node->get_function(),
                                           telemetry,
                                           output_stream,

src/frontends/onnx/frontend/src/utils/onnx_internal.cpp

+3 −3

@@ -31,7 +31,7 @@ void remove_dangling_parameters(std::shared_ptr<ov::Model>& model) {
            std::all_of(parameter_users.begin(),
                        parameter_users.end(),
                        [](const std::shared_ptr<ov::Node>& node) -> bool {
-                           return std::dynamic_pointer_cast<ov::frontend::onnx::ONNXFrameworkNode>(node) != nullptr;
+                           return ov::as_type_ptr<ov::frontend::onnx::ONNXFrameworkNode>(node) != nullptr;
                        });
        if (is_dangling_parameter) {
            model->remove_parameter(parameter);
@@ -69,8 +69,8 @@ void convert_decoded_model(std::shared_ptr<ov::Model> model) {
                    "' attribute in decoded model. Model probably wasn't created by FrontEnd::decode function.");
    auto onnx_graph = it->second.as<std::shared_ptr<ov::frontend::onnx::Graph>>();
    for (const auto& node : model->get_ordered_ops()) {
-        if (auto raw_node = std::dynamic_pointer_cast<ov::frontend::onnx::ONNXFrameworkNode>(node)) {
-            if (auto subgraph_node = std::dynamic_pointer_cast<ov::frontend::onnx::ONNXSubgraphFrameworkNode>(node)) {
+        if (auto raw_node = ov::as_type_ptr<ov::frontend::onnx::ONNXFrameworkNode>(node)) {
+            if (auto subgraph_node = ov::as_type_ptr<ov::frontend::onnx::ONNXSubgraphFrameworkNode>(node)) {
                subgraph_node->infer_inputs_from_parent();
                for (auto& model : subgraph_node->get_subgraph_models()) {
                    convert_decoded_model(model);

src/frontends/onnx/tests/conversion.cpp

+1 −1

@@ -69,7 +69,7 @@ TEST(ONNXConversionExtensionTest, custom_op_with_custom_domain) {
    OV_ASSERT_NO_THROW(model = onnx::tests::convert_model("missing_op_domain.onnx", ext));

    for (const auto& op : model->get_ops()) {
-        if (const auto& add = std::dynamic_pointer_cast<ov::op::v1::Add>(op)) {
+        if (const auto& add = ov::as_type_ptr<ov::op::v1::Add>(op)) {
            EXPECT_TRUE(add->get_rt_info().count("added_by_extension") == 1);
            return;
        }

src/frontends/onnx/tests/convert_partially_tests.cpp

+1 −1

@@ -19,7 +19,7 @@ namespace {
 std::shared_ptr<ov::op::util::FrameworkNode> get_framework_node_with_out_name(const std::shared_ptr<ov::Model>& model,
                                                                               const std::string& out_name) {
    for (const auto& op : model->get_ops()) {
-        if (auto framework_node = std::dynamic_pointer_cast<ov::op::util::FrameworkNode>(op)) {
+        if (auto framework_node = ov::as_type_ptr<ov::op::util::FrameworkNode>(op)) {
            for (const auto& out : op->outputs()) {
                if (out.get_any_name() == out_name) {
                    return framework_node;

src/frontends/onnx/tests/onnx_import_convpool.in.cpp

+1 −1

@@ -334,7 +334,7 @@ OPENVINO_TEST(${BACKEND_NAME}, onnx_model_max_pool_empty_auto_pad) {
    const auto model = convert_model("max_pool_empty_auto_pad.onnx");

    for (const auto& op : model->get_ops()) {
-        if (const auto max_pool = std::dynamic_pointer_cast<op::v8::MaxPool>(op)) {
+        if (const auto max_pool = ov::as_type_ptr<op::v8::MaxPool>(op)) {
            EXPECT_EQ(max_pool->get_auto_pad(), op::PadType::EXPLICIT);
            return;
        }

src/frontends/onnx/tests/onnx_tensor_names.cpp

+3 −3

@@ -29,7 +29,7 @@ bool matching_node_found_in_graph(const std::vector<DerivedFromNode>& ops,
                                  const std::unordered_set<std::string>& output_names,
                                  int out_tensor_number = 0) {
    return std::any_of(std::begin(ops), std::end(ops), [&](const DerivedFromNode op) {
-        if (const std::shared_ptr<OpType> casted = std::dynamic_pointer_cast<OpType>(op)) {
+        if (const std::shared_ptr<OpType> casted = ov::as_type_ptr<OpType>(op)) {
            const auto& op_friendly_name = casted->get_friendly_name();
            const auto& op_output_names = casted->get_output_tensor(out_tensor_number).get_names();
            if (op_friendly_name == friendly_name && op_output_names == output_names) {
@@ -44,11 +44,11 @@ template <typename OpType, typename DerivedFromNode>
 std::shared_ptr<OpType> find_by_friendly_name(const std::vector<DerivedFromNode>& ops,
                                               const std::string& friendly_name) {
    const auto it = std::find_if(std::begin(ops), std::end(ops), [&friendly_name](const DerivedFromNode& op) {
-        return op->get_friendly_name() == friendly_name && std::dynamic_pointer_cast<OpType>(op) != nullptr;
+        return op->get_friendly_name() == friendly_name && ov::as_type_ptr<OpType>(op) != nullptr;
    });

    if (it != std::end(ops)) {
-        return std::dynamic_pointer_cast<OpType>(*it);
+        return ov::as_type_ptr<OpType>(*it);
    } else {
        return nullptr;
    }

src/frontends/paddle/src/frontend.cpp

+1 −1

@@ -492,7 +492,7 @@ std::shared_ptr<ov::Model> FrontEnd::convert(const InputModel::Ptr& model) const
 void FrontEnd::convert(const std::shared_ptr<ov::Model>& partiallyConverted) const {
    for (const auto& node : partiallyConverted->get_ordered_ops()) {
        if (ov::is_type<FrameworkNode>(node)) {
-            paddle::normalize_framework_node(std::dynamic_pointer_cast<FrameworkNode>(node), m_op_translators);
+            paddle::normalize_framework_node(ov::as_type_ptr<FrameworkNode>(node), m_op_translators);
        }
    }
    for (const auto& result : partiallyConverted->get_results()) {

src/frontends/paddle/src/internal/pass/transform_fakequantize.cpp

+4 −4

@@ -71,20 +71,20 @@ ov::frontend::paddle::pass::TransformFakeQuantize::TransformFakeQuantize() {

        // check round mode
        // Fallback to the PDPD FE if the round_mode is HALF_AWAY_FROM_ZERO.
-        const auto& round_node_cast = std::dynamic_pointer_cast<Round>(opsMap.at(round_label).get_node_shared_ptr());
+        const auto& round_node_cast = ov::as_type_ptr<Round>(opsMap.at(round_label).get_node_shared_ptr());
        if (!round_node_cast || round_node_cast->get_mode() != Round::RoundMode::HALF_TO_EVEN) {
            return false;
        }

        // check quantize_linear zero_point
-        auto zp_node_cast = std::dynamic_pointer_cast<Constant>(opsMap.at(dq_zp_label).get_node_shared_ptr());
+        auto zp_node_cast = ov::as_type_ptr<Constant>(opsMap.at(dq_zp_label).get_node_shared_ptr());
        float zp;
        if (!zp_node_cast || !ov::op::util::get_single_value(zp_node_cast, zp)) {
            return false;
        }

        // prepare levels
-        const auto& clamp_node_cast = std::dynamic_pointer_cast<Clamp>(opsMap.at(q_clamp_label).get_node_shared_ptr());
+        const auto& clamp_node_cast = ov::as_type_ptr<Clamp>(opsMap.at(q_clamp_label).get_node_shared_ptr());
        if (!clamp_node_cast) {
            return false;
        }
@@ -93,7 +93,7 @@ ov::frontend::paddle::pass::TransformFakeQuantize::TransformFakeQuantize() {
        const auto levels = high_range - low_range + 1;

        // get the scale
-        const auto& scale_node_cast = std::dynamic_pointer_cast<Constant>(
+        const auto& scale_node_cast = ov::as_type_ptr<Constant>(
            opsMap.at(q_real_scale_label).get_node_shared_ptr()->get_input_node_shared_ptr(0));
        float scale;
        if (!scale_node_cast || !ov::op::util::get_single_value(scale_node_cast, scale)) {

src/frontends/paddle/src/internal/pass/transform_if.cpp

+1 −2

@@ -23,8 +23,7 @@ ov::frontend::paddle::pass::TransformIf::TransformIf(std::vector<std::shared_ptr
    const auto cond_label = pattern::wrap_type<ov::op::internal::ConditionalBlock>();

    matcher_pass_callback callback = [funcs](pattern::Matcher& m) -> bool {
-        const auto conditional_block =
-            std::dynamic_pointer_cast<ov::op::internal::ConditionalBlock>(m.get_match_root());
+        const auto conditional_block = ov::as_type_ptr<ov::op::internal::ConditionalBlock>(m.get_match_root());
        const auto mask_idx = conditional_block->get_input_size() - 1;
        const auto cond = conditional_block->get_input_node_shared_ptr(mask_idx);

src/frontends/paddle/src/internal/pass/transform_while.cpp

+1 −1

@@ -29,7 +29,7 @@ ov::frontend::paddle::pass::TransformWhile::TransformWhile(std::vector<std::shar
    const auto while_label = pattern::wrap_type<ov::op::internal::While>();

    matcher_pass_callback callback = [functions](pattern::Matcher& m) -> bool {
-        const auto& while_node = std::dynamic_pointer_cast<ov::op::internal::While>(m.get_match_root());
+        const auto& while_node = ov::as_type_ptr<ov::op::internal::While>(m.get_match_root());
        if (!while_node)
            return false;
        const auto& inputs = while_node->input_values();

src/frontends/pytorch/src/frontend.cpp

+2 −2

@@ -183,7 +183,7 @@ std::shared_ptr<Model> FrontEnd::convert(const ov::frontend::InputModel::Ptr& mo
            auto place = inputs[i];
            if (place->get_names().size() != 0 && input_names.find(place->get_names().at(0)) != input_names.end()) {
                auto input = converted_model->input(place->get_names().at(0));
-                auto param = std::dynamic_pointer_cast<ov::op::v0::Parameter>(input.get_node_shared_ptr());
+                auto param = ov::as_type_ptr<ov::op::v0::Parameter>(input.get_node_shared_ptr());
                FRONT_END_GENERAL_CHECK(param, "Input is not a Parameter.");
                update_parameter_info(param, place, converted_model);
            } else {
@@ -205,7 +205,7 @@ std::shared_ptr<Model> FrontEnd::convert(const ov::frontend::InputModel::Ptr& mo
                update_parameter_info(parameters[idx], fplace, converted_model);
            } else {
                auto input = converted_model->input(fplace->get_names().at(0));
-                auto param = std::dynamic_pointer_cast<ov::op::v0::Parameter>(input.get_node_shared_ptr());
+                auto param = ov::as_type_ptr<ov::op::v0::Parameter>(input.get_node_shared_ptr());
                FRONT_END_GENERAL_CHECK(param, "Input is not a Parameter.");
                update_parameter_info(param, fplace, converted_model);
            }

src/frontends/pytorch/src/helper_ops/internal_op.hpp

+3

@@ -41,6 +41,9 @@ class InternalOpDecoder : public DummyDecoder {
 };

 class InternalOperation : public PtFrameworkNode {
+public:
+    OPENVINO_OP("InternalOperation", "util", PtFrameworkNode);
+
 protected:
    InternalOperation(const std::string& op_type,
                      const OutputVector& inputs,

src/frontends/pytorch/src/helper_ops/packed_sequence.hpp

+2 −2

@@ -13,7 +13,7 @@ namespace pytorch {

 class PackPadded : public InternalOperation {
 public:
-    OPENVINO_OP("PackPadded", "util", ov::op::util::FrameworkNode);
+    OPENVINO_OP("PackPadded", "util", InternalOperation);
    PackPadded(const Output<Node>& input, const Output<Node>& lengths)
        : InternalOperation("prim::PackPadded", {input, lengths}, 2, "This is PackedSequence pack operation.") {
        validate_and_infer_types();
@@ -27,7 +27,7 @@ class PackPadded : public InternalOperation {

 class PadPacked : public InternalOperation {
 public:
-    OPENVINO_OP("PadPacked", "util", ov::op::util::FrameworkNode);
+    OPENVINO_OP("PadPacked", "util", InternalOperation);
    PadPacked(const Output<Node>& input, const Output<Node>& lengths)
        : InternalOperation("prim::PadPacked", {input, lengths}, 2, "This is PackedSequence unpack operation.") {
        validate_and_infer_types();
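
A note on why these header changes accompany the cast replacement: `ov::as_type_ptr` matches through OpenVINO's `type_info` hierarchy rather than C++ RTTI, so a class is only castable if its `OPENVINO_OP` declaration names the correct parent type. A rough sketch of that mechanism, under stated assumptions (`MyInternalOp` is hypothetical, and the `FrameworkNode` constructor signature is assumed from its public header):

```cpp
#include <memory>

#include "openvino/core/type.hpp"
#include "openvino/op/util/framework_node.hpp"

// Hypothetical helper op mirroring the pattern above. The third OPENVINO_OP
// argument declares the parent type info, which is what lets as_type_ptr
// walk the hierarchy without C++ RTTI.
class MyInternalOp : public ov::op::util::FrameworkNode {
public:
    OPENVINO_OP("MyInternalOp", "util", ov::op::util::FrameworkNode);
    MyInternalOp() : ov::op::util::FrameworkNode(ov::OutputVector{}, 1) {}
};

bool casts_to_framework_node() {
    std::shared_ptr<ov::Node> node = std::make_shared<MyInternalOp>();
    // Non-null because MyInternalOp's type_info chains up to FrameworkNode.
    return ov::as_type_ptr<ov::op::util::FrameworkNode>(node) != nullptr;
}
```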

src/frontends/pytorch/src/node_context.cpp

+3 −3

@@ -111,7 +111,7 @@ Output<Node> NodeContext::get_input_from_visible_context(size_t index) const {
    FRONT_END_GENERAL_CHECK(index < get_input_size(), "Index ", index, " is lower then number of inputs.");
    auto input_tensor = get_input(static_cast<int>(index));
    auto input_node = input_tensor.get_node_shared_ptr();
-    if (std::dynamic_pointer_cast<v0::Parameter>(input_node)) {
+    if (ov::as_type_ptr<v0::Parameter>(input_node)) {
        // We need to look into external context for inputs that would be feed into this parameter
        size_t tensor_idx = m_translate_session->decode_tensor_name(input_node->output(0));
        if (m_ext_tensor_map.count(tensor_idx)) {
@@ -298,7 +298,7 @@ template <>
 std::string NodeContext::const_input<std::string>(size_t index) const {
    FRONT_END_GENERAL_CHECK(!input_is_none(index), "Input with index: ", index, " is none.");
    auto input_node = get_input_from_visible_context(index).get_node_shared_ptr();
-    auto input = std::dynamic_pointer_cast<PtFrameworkNode>(input_node);
+    auto input = ov::as_type_ptr<PtFrameworkNode>(input_node);
    FRONT_END_GENERAL_CHECK(input,
                            "Input node with index ",
                            index,
@@ -327,7 +327,7 @@ Any NodeContext::get_values_from_const_input(int index) const {
    if (input_is_none(index))
        return {};
    auto input_val = get_input_from_visible_context(index);
-    if (auto input = std::dynamic_pointer_cast<PtFrameworkNode>(input_val.get_node_shared_ptr())) {
+    if (auto input = ov::as_type_ptr<PtFrameworkNode>(input_val.get_node_shared_ptr())) {
        const auto& attrs = input->get_attrs();
        if (attrs.find("none_value") != attrs.end()) {
            return {};
