diff --git a/src/common/transformations/include/transformations/utils/utils.hpp b/src/common/transformations/include/transformations/utils/utils.hpp
index 946741e66cd761..d5e7d36f90b55d 100644
--- a/src/common/transformations/include/transformations/utils/utils.hpp
+++ b/src/common/transformations/include/transformations/utils/utils.hpp
@@ -66,37 +66,6 @@ inline bool has_decompression_converts(const std::shared_ptr<const ov::Model>& f
     return false;
 }
 
-OPENVINO_DEPRECATED("Plugins should use ov::ISyncInferRequest::find_port")
-inline std::string create_ie_output_name(const Output<const Node>& output) {
-    const auto& prev_layer = output.get_node_shared_ptr();
-    auto out_name = prev_layer->get_friendly_name();
-    if (prev_layer->get_output_size() != 1) {
-        out_name += "." + std::to_string(output.get_index());
-    }
-    return out_name;
-}
-
-OPENVINO_DEPRECATED("Plugins should use ov::ISyncInferRequest::find_port")
-inline std::string create_ie_output_name(const Output<Node>& output) {
-    OPENVINO_SUPPRESS_DEPRECATED_START
-    return create_ie_output_name(ov::Output<const Node>(output.get_node(), output.get_index()));
-    OPENVINO_SUPPRESS_DEPRECATED_END
-}
-
-OPENVINO_DEPRECATED("Plugins should use ov::ISyncInferRequest::find_port")
-inline std::string get_ie_output_name(const Output<const Node>& output) {
-    OPENVINO_SUPPRESS_DEPRECATED_START
-    return create_ie_output_name(output);
-    OPENVINO_SUPPRESS_DEPRECATED_END
-}
-
-OPENVINO_DEPRECATED("Plugins should use ov::ISyncInferRequest::find_port")
-inline std::string get_ie_output_name(const Output<Node>& output) {
-    OPENVINO_SUPPRESS_DEPRECATED_START
-    return get_ie_output_name(ov::Output<const Node>(output.get_node(), output.get_index()));
-    OPENVINO_SUPPRESS_DEPRECATED_END
-}
-
 /**
  * \brief Convert epsilon value from double to float type.
  *
diff --git a/src/common/transformations/src/transformations/op_conversions/convert_maxpool_downgrade.cpp b/src/common/transformations/src/transformations/op_conversions/convert_maxpool_downgrade.cpp
index 4f6f935fcba058..29ba95f77a87cb 100644
--- a/src/common/transformations/src/transformations/op_conversions/convert_maxpool_downgrade.cpp
+++ b/src/common/transformations/src/transformations/op_conversions/convert_maxpool_downgrade.cpp
@@ -7,6 +7,7 @@
 #include "itt.hpp"
 #include "openvino/core/descriptor/tensor.hpp"
 #include "openvino/core/graph_util.hpp"
+#include "openvino/core/rt_info.hpp"
 #include "openvino/op/add.hpp"
 #include "openvino/op/avg_pool.hpp"
 #include "openvino/op/broadcast.hpp"
@@ -24,7 +25,6 @@
 #include "openvino/pass/manager.hpp"
 #include "openvino/pass/pattern/op/wrap_type.hpp"
 #include "openvino/pass/visualize_tree.hpp"
-#include "transformations/utils/utils.hpp"
 
 ov::pass::ConvertMaxPool8ToMaxPool1::ConvertMaxPool8ToMaxPool1() {
     MATCHER_SCOPE(ConvertMaxPool8ToMaxPool1);
@@ -49,10 +49,6 @@ ov::pass::ConvertMaxPool8ToMaxPool1::ConvertMaxPool8ToMaxPool1() {
                                                                      maxpool_v8_node->get_rounding_type(),
                                                                      maxpool_v8_node->get_auto_pad());
 
-        OPENVINO_SUPPRESS_DEPRECATED_START
-        auto out_name = ov::op::util::create_ie_output_name(maxpool_v8_node->output(0));
-        OPENVINO_SUPPRESS_DEPRECATED_END
-
         maxpool_v1_node->set_friendly_name(maxpool_v8_node->get_friendly_name());
         maxpool_v8_node->output(0).replace(maxpool_v1_node->output(0));
         ov::copy_runtime_info(maxpool_v8_node, maxpool_v1_node);
diff --git a/src/common/transformations/src/transformations/op_conversions/convert_nms9_to_nms_ie_internal.cpp b/src/common/transformations/src/transformations/op_conversions/convert_nms9_to_nms_ie_internal.cpp
index 8b1c572dfaa73d..abf2d9b1bf6ee5 100644
--- a/src/common/transformations/src/transformations/op_conversions/convert_nms9_to_nms_ie_internal.cpp
+++ b/src/common/transformations/src/transformations/op_conversions/convert_nms9_to_nms_ie_internal.cpp
@@ -14,9 +14,9 @@
 #include "openvino/op/convert.hpp"
 #include "openvino/op/non_max_suppression.hpp"
 #include "openvino/op/reshape.hpp"
+#include "openvino/op/util/node_util.hpp"
 #include "openvino/pass/pattern/op/wrap_type.hpp"
 #include "ov_ops/nms_ie_internal.hpp"
-#include "transformations/utils/utils.hpp"
 
 ov::pass::ConvertNMS9ToNMSIEInternal::ConvertNMS9ToNMSIEInternal() {
     MATCHER_SCOPE(ConvertNMS9ToNMSIEInternal);
@@ -110,18 +110,14 @@ ov::pass::ConvertNMS9ToNMSIEInternal::ConvertNMS9ToNMSIEInternal() {
         Output<Node> output_0 = nms_legacy->output(0);
         if (nms_9->output(0).get_element_type() != output_0.get_element_type()) {
             output_0 = std::make_shared<ov::op::v0::Convert>(output_0, nms_9->output(0).get_element_type());
-            OPENVINO_SUPPRESS_DEPRECATED_START
-            output_0.get_node_shared_ptr()->set_friendly_name(op::util::create_ie_output_name(nms_9->output(0)));
-            OPENVINO_SUPPRESS_DEPRECATED_END
+            output_0.get_node_shared_ptr()->set_friendly_name(ov::util::make_default_tensor_name(nms_9->output(0)));
             new_ops.emplace_back(output_0.get_node_shared_ptr());
         }
 
         Output<Node> output_2 = nms_legacy->output(2);
         if (nms_9->output(2).get_element_type() != output_2.get_element_type()) {
             output_2 = std::make_shared<ov::op::v0::Convert>(output_2, nms_9->output(2).get_element_type());
-            OPENVINO_SUPPRESS_DEPRECATED_START
-            output_2.get_node_shared_ptr()->set_friendly_name(op::util::create_ie_output_name(nms_9->output(2)));
-            OPENVINO_SUPPRESS_DEPRECATED_END
+            output_2.get_node_shared_ptr()->set_friendly_name(ov::util::make_default_tensor_name(nms_9->output(2)));
             new_ops.emplace_back(output_2.get_node_shared_ptr());
         }
 
diff --git a/src/common/transformations/tests/op_conversions/convert_nms9_to_nms_ie_internal_test.cpp b/src/common/transformations/tests/op_conversions/convert_nms9_to_nms_ie_internal_test.cpp
index 9cefe8f22fd1c0..58f80181a3384a 100644
--- a/src/common/transformations/tests/op_conversions/convert_nms9_to_nms_ie_internal_test.cpp
+++ b/src/common/transformations/tests/op_conversions/convert_nms9_to_nms_ie_internal_test.cpp
@@ -38,6 +38,7 @@ TEST_F(TransformationTestsF, ConvertPreviousNMSToNMSIEInternal) {
                                                                score_threshold,
                                                                op::v1::NonMaxSuppression::BoxEncodingType::CORNER,
                                                                true);
+        nms->set_friendly_name("nms");
 
         model = std::make_shared<Model>(OutputVector{nms}, ParameterVector{boxes, scores});
 
@@ -65,6 +66,7 @@ TEST_F(TransformationTestsF, ConvertPreviousNMSToNMSIEInternal) {
                                                                          true,
                                                                          element::i32);
         auto convert = std::make_shared<op::v0::Convert>(nms->output(0), element::i64);
+        convert->set_friendly_name("nms:0");
 
         model_ref = std::make_shared<Model>(OutputVector{convert}, ParameterVector{boxes, scores});
     }
diff --git a/src/inference/src/dev/icompiled_model.cpp b/src/inference/src/dev/icompiled_model.cpp
index c3dca80e73340f..7fd8b996d8b305 100644
--- a/src/inference/src/dev/icompiled_model.cpp
+++ b/src/inference/src/dev/icompiled_model.cpp
@@ -5,15 +5,27 @@
 #include "openvino/runtime/icompiled_model.hpp"
 
 #include "openvino/core/model.hpp"
+#include "openvino/op/util/node_util.hpp"
 #include "openvino/runtime/iasync_infer_request.hpp"
 #include "openvino/runtime/iplugin.hpp"
 #include "openvino/runtime/properties.hpp"
-#include "transformations/utils/utils.hpp"
 
 #if defined(OPENVINO_GNU_LIBC) && !defined(__ANDROID__)
 #    include <malloc.h>
 #endif
 
+namespace {
+// Legacy tensor name format for IR v10 compatibility (uses '.' separator instead of ':')
+// Can be removed when IR v10 support is deprecated
+std::string make_ir_v10_tensor_name(const ov::Output<const ov::Node>& output) {
+    auto name = output.get_node()->get_friendly_name();
+    if (output.get_node()->get_output_size() > 1) {
+        name += "." + std::to_string(output.get_index());
+    }
+    return name;
+}
+}  // namespace
+
 ov::ICompiledModel::ICompiledModel(const std::shared_ptr<const ov::Model>& model,
                                    const std::shared_ptr<const ov::IPlugin>& plugin,
                                    const std::shared_ptr<ov::threading::ITaskExecutor>& task_executor,
@@ -84,9 +96,9 @@ ov::ICompiledModel::ICompiledModel(const std::shared_ptr<const ov::Model>& model
         for (const auto& result : model->get_results()) {
             auto fake_param = std::make_shared<ov::op::v0::Parameter>(result->get_output_element_type(0),
                                                                       result->get_output_partial_shape(0));
-            OPENVINO_SUPPRESS_DEPRECATED_START
-            const std::string res_name = ov::op::util::create_ie_output_name(result->input_value(0));
-            OPENVINO_SUPPRESS_DEPRECATED_END
+            const std::string res_name = add_operation_names
+                                             ? make_ir_v10_tensor_name(result->input_value(0))
+                                             : ov::util::make_default_tensor_name(result->input_value(0));
             fake_param->set_friendly_name(res_name);
             fake_param->set_element_type(result->get_element_type());
             fake_param->validate_and_infer_types();
diff --git a/src/inference/src/model_reader.cpp b/src/inference/src/model_reader.cpp
index a7622a2423a607..e4c9b482c9f365 100644
--- a/src/inference/src/model_reader.cpp
+++ b/src/inference/src/model_reader.cpp
@@ -12,9 +12,18 @@
 #include "openvino/runtime/shared_buffer.hpp"
 #include "openvino/util/common_util.hpp"
 #include "openvino/util/file_util.hpp"
-#include "transformations/utils/utils.hpp"
 
 namespace {
+// Legacy tensor name format for IR v10 compatibility (uses '.' separator instead of ':')
+// Can be removed when IR v10 support is deprecated
+std::string make_ir_v10_tensor_name(const ov::Output<const ov::Node>& output) {
+    auto name = output.get_node()->get_friendly_name();
+    if (output.get_node()->get_output_size() > 1) {
+        name += "." + std::to_string(output.get_index());
+    }
+    return name;
+}
+
 ov::element::Type to_legacy_type(const ov::element::Type& legacy_type, bool input) {
     if (input) {
         return legacy_type == ov::element::f16 ? ov::element::f32 : legacy_type;
@@ -77,11 +86,7 @@ void update_v10_model(std::shared_ptr<ov::Model>& model, bool frontendMode = fal
     // we need to add operation names as tensor names for inputs and outputs
     {
         for (const auto& result : model->get_results()) {
-            OPENVINO_SUPPRESS_DEPRECATED_START
-            // Note, upon removal of 'create_ie_output_name', just move it to this file as a local function
-            // we still need to add operation names as tensor names for outputs for IR v10
-            auto res_name = ov::op::util::create_ie_output_name(result->input_value(0));
-            OPENVINO_SUPPRESS_DEPRECATED_END
+            auto res_name = make_ir_v10_tensor_name(result->input_value(0));
             OPENVINO_ASSERT(leaf_names.find(res_name) == leaf_names.end() ||
                                 result->output(0).get_names().find(res_name) != result->output(0).get_names().end(),
                             "Model operation names have collisions with tensor names.",