From 539c459a16878a099c7f3d8c4380f759c81bd9ed Mon Sep 17 00:00:00 2001
From: Mircea-Aurelian Dan
Date: Mon, 9 Dec 2024 09:00:20 +0000
Subject: [PATCH] Fix clang formats

---
 .../openvino/runtime/shared_buffer.hpp        |  4 +-
 src/inference/src/cache_manager.hpp           |  3 +-
 src/inference/src/dev/compilation_context.cpp |  3 +-
 src/inference/src/dev/iplugin.cpp             |  4 +-
 src/inference/src/dev/plugin.cpp              |  4 +-
 src/inference/src/dev/plugin.hpp              |  5 +-
 src/plugins/intel_cpu/src/plugin.cpp          | 27 +++----
 src/plugins/intel_cpu/src/plugin.h            |  9 +--
 src/plugins/intel_cpu/src/utils/serialize.cpp | 70 +++++++++++--------
 src/plugins/intel_cpu/src/utils/serialize.hpp |  8 ++-
 .../include/intel_npu/common/igraph.hpp       | 12 +---
 .../common/include/intel_npu/common/npu.hpp   |  3 -
 .../src/driver_compiler_adapter.cpp           |  6 +-
 .../src/compiler_adapter/src/driver_graph.cpp |  2 +-
 .../src/plugin_compiler_adapter.cpp           | 12 ++--
 .../src/compiler_adapter/src/plugin_graph.cpp | 12 +++-
 .../src/ze_graph_ext_wrappers.cpp             |  8 +--
 .../intel_npu/src/plugin/include/metrics.hpp  |  3 +-
 .../intel_npu/src/plugin/src/plugin.cpp       |  6 +-
 19 files changed, 109 insertions(+), 92 deletions(-)

diff --git a/src/core/dev_api/openvino/runtime/shared_buffer.hpp b/src/core/dev_api/openvino/runtime/shared_buffer.hpp
index 859675344c98b8..cdfe58f0741e1e 100644
--- a/src/core/dev_api/openvino/runtime/shared_buffer.hpp
+++ b/src/core/dev_api/openvino/runtime/shared_buffer.hpp
@@ -95,8 +95,8 @@ class OwningSharedStreamBuffer : public SharedStreamBuffer {
     }
 
     pos_type seekoff(off_type off,
-            std::ios_base::seekdir dir,
-            std::ios_base::openmode which = std::ios_base::in) override {
+                     std::ios_base::seekdir dir,
+                     std::ios_base::openmode which = std::ios_base::in) override {
         auto pos = SharedStreamBuffer::seekoff(off, dir, which);
         m_shared_obj->updateOffset(m_offset);
         return pos;
diff --git a/src/inference/src/cache_manager.hpp b/src/inference/src/cache_manager.hpp
index 0f2e1e27e2a950..aacace4f425d53 100644
--- a/src/inference/src/cache_manager.hpp
+++ b/src/inference/src/cache_manager.hpp
@@ -150,7 +150,8 @@ class FileStorageCacheManager final : public ICacheManager {
             reader(stream, shared_buffer);
         } else {
             std::ifstream stream(blob_file_name, std::ios_base::binary);
-            reader(stream, nullptr); }
+            reader(stream, nullptr);
+        }
     }
 }
diff --git a/src/inference/src/dev/compilation_context.cpp b/src/inference/src/dev/compilation_context.cpp
index 052f39d36c9008..34f7156190f231 100644
--- a/src/inference/src/dev/compilation_context.cpp
+++ b/src/inference/src/dev/compilation_context.cpp
@@ -156,7 +156,8 @@ std::string ModelCache::compute_hash(const std::string& modelStr,
 
 //////////////////////////////////////////////////
 
-CompiledBlobHeader::CompiledBlobHeader(std::shared_ptr<ov::AlignedBuffer> model_buffer) : m_model_buffer(model_buffer) {}
+CompiledBlobHeader::CompiledBlobHeader(std::shared_ptr<ov::AlignedBuffer> model_buffer)
+    : m_model_buffer(model_buffer) {}
 
 CompiledBlobHeader::CompiledBlobHeader(const std::string& ieVersion,
                                        const std::string& fileInfo,
diff --git a/src/inference/src/dev/iplugin.cpp b/src/inference/src/dev/iplugin.cpp
index 42d735baa0449a..1e1b70af861b58 100644
--- a/src/inference/src/dev/iplugin.cpp
+++ b/src/inference/src/dev/iplugin.cpp
@@ -59,14 +59,14 @@ const std::string& ov::IPlugin::get_device_name() const {
 
 std::shared_ptr<ov::ICompiledModel> ov::IPlugin::import_model(std::istream& model,
                                                               std::shared_ptr<ov::AlignedBuffer> model_buffer,
-                                                              const ov::AnyMap& properties) const{
+                                                              const ov::AnyMap& properties) const {
     OPENVINO_THROW_NOT_IMPLEMENTED("This method is not implemented");
 }
 
 std::shared_ptr<ov::ICompiledModel> ov::IPlugin::import_model(std::istream& model,
                                                              std::shared_ptr<ov::AlignedBuffer> model_buffer,
                                                              const ov::SoPtr<ov::IRemoteContext>& context,
-                                                              const ov::AnyMap& properties) const{
+                                                              const ov::AnyMap& properties) const {
     OPENVINO_THROW_NOT_IMPLEMENTED("This method is not implemented");
 }
diff --git a/src/inference/src/dev/plugin.cpp b/src/inference/src/dev/plugin.cpp
index 23e0e04bb6d0e0..605dc94e0ef487 100644
--- a/src/inference/src/dev/plugin.cpp
+++ b/src/inference/src/dev/plugin.cpp
@@ -79,7 +79,9 @@ ov::SoPtr<ov::ICompiledModel> ov::Plugin::import_model(std::istream& model,
     OV_PLUGIN_CALL_STATEMENT(return {m_ptr->import_model(model, context, config), m_so});
 }
 
-ov::SoPtr<ov::ICompiledModel> ov::Plugin::import_model(std::istream& model, std::shared_ptr<ov::AlignedBuffer> model_buffer, const ov::AnyMap& properties) const {
+ov::SoPtr<ov::ICompiledModel> ov::Plugin::import_model(std::istream& model,
+                                                       std::shared_ptr<ov::AlignedBuffer> model_buffer,
+                                                       const ov::AnyMap& properties) const {
     OV_PLUGIN_CALL_STATEMENT(return {m_ptr->import_model(model, model_buffer, properties), m_so});
 }
 
diff --git a/src/inference/src/dev/plugin.hpp b/src/inference/src/dev/plugin.hpp
index 004fcc04446c0a..bdc84737456aec 100644
--- a/src/inference/src/dev/plugin.hpp
+++ b/src/inference/src/dev/plugin.hpp
@@ -59,7 +59,9 @@ class Plugin {
                                            const ov::SoPtr<ov::IRemoteContext>& context,
                                            const ov::AnyMap& config) const;
 
-    SoPtr<ov::ICompiledModel> import_model(std::istream& model, std::shared_ptr<ov::AlignedBuffer> model_buffer, const ov::AnyMap& properties) const;
+    SoPtr<ov::ICompiledModel> import_model(std::istream& model,
+                                           std::shared_ptr<ov::AlignedBuffer> model_buffer,
+                                           const ov::AnyMap& properties) const;
 
     SoPtr<ov::ICompiledModel> import_model(std::istream& model,
                                            std::shared_ptr<ov::AlignedBuffer> model_buffer,
@@ -85,4 +87,3 @@ class Plugin {
 };
 
 }  // namespace ov
-
diff --git a/src/plugins/intel_cpu/src/plugin.cpp b/src/plugins/intel_cpu/src/plugin.cpp
index 4dcceb1f4ee628..0ea50bdeb54f0f 100644
--- a/src/plugins/intel_cpu/src/plugin.cpp
+++ b/src/plugins/intel_cpu/src/plugin.cpp
@@ -7,6 +7,7 @@
 #include "cpu_streams_calculation.hpp"
 #include "internal_properties.hpp"
 #include "itt.h"
+#include "openvino/op/paged_attention.hpp"
 #include "openvino/runtime/intel_cpu/properties.hpp"
 #include "openvino/runtime/internal_properties.hpp"
 #include "openvino/runtime/properties.hpp"
@@ -19,7 +20,6 @@
 #include "utils/precision_support.h"
 #include "utils/serialize.hpp"
 #include "weights_cache.hpp"
-#include "openvino/op/paged_attention.hpp"
 
 #if defined(__linux__)
 #    include
@@ -200,7 +200,7 @@ static Config::ModelType getModelType(const std::shared_ptr<Model>& model) {
         return Config::ModelType::CNN;
 
     if ((op::util::has_op_with_type<op::v13::ScaledDotProductAttention>(model) && model->get_variables().size() > 0) ||
-            op::util::has_op_with_type<op::PagedAttentionExtension>(model))
+        op::util::has_op_with_type<op::PagedAttentionExtension>(model))
         return Config::ModelType::LLM;
 
     return Config::ModelType::Unknown;
}
@@ -446,15 +446,17 @@ ov::Any Plugin::get_ro_property(const std::string& name, const ov::AnyMap& options) const {
 
         return decltype(ov::supported_properties)::value_type(std::move(supportedProperties));
     } else if (ov::internal::supported_properties == name) {
-        return decltype(ov::internal::supported_properties)::value_type{
+        return decltype(ov::internal::supported_properties)::value_type {
             ov::PropertyName{ov::internal::caching_properties.name(), ov::PropertyMutability::RO},
 #if !defined(OPENVINO_ARCH_ARM) && !(defined(__APPLE__) || defined(__MACOSX))
-            ov::PropertyName{ov::internal::caching_with_mmap.name(), ov::PropertyMutability::RO},
+                ov::PropertyName{ov::internal::caching_with_mmap.name(), ov::PropertyMutability::RO},
 #endif
-            ov::PropertyName{ov::internal::exclusive_async_requests.name(), ov::PropertyMutability::RW},
-            ov::PropertyName{ov::internal::compiled_model_runtime_properties.name(), ov::PropertyMutability::RO},
-            ov::PropertyName{ov::internal::compiled_model_runtime_properties_supported.name(),
-                             ov::PropertyMutability::RO}};
+                ov::PropertyName{ov::internal::exclusive_async_requests.name(), ov::PropertyMutability::RW},
+                ov::PropertyName{ov::internal::compiled_model_runtime_properties.name(), ov::PropertyMutability::RO},
+                ov::PropertyName {
+                    ov::internal::compiled_model_runtime_properties_supported.name(), ov::PropertyMutability::RO
+                }
+        };
     } else if (name == ov::device::full_name) {
         return decltype(ov::device::full_name)::value_type(deviceFullName);
     } else if (name == ov::available_devices) {
@@ -553,18 +555,16 @@ ov::SupportedOpsMap Plugin::query_model(const std::shared_ptr<const ov::Model>& model,
     return res;
 }
 
-std::shared_ptr<ov::ICompiledModel> Plugin::import_model(std::istream& model_stream,
-                                                         const ov::AnyMap& config) const {
+std::shared_ptr<ov::ICompiledModel> Plugin::import_model(std::istream& model_stream, const ov::AnyMap& config) const {
     return import_model(model_stream, nullptr, config);
 }
 
-
 std::shared_ptr<ov::ICompiledModel> Plugin::import_model(std::istream& model_stream,
                                                          std::shared_ptr<ov::AlignedBuffer> model_buffer,
                                                          const ov::AnyMap& config) const {
     OV_ITT_SCOPE(FIRST_INFERENCE, itt::domains::intel_cpu_LT, "import_model");
 
-    CacheDecrypt decrypt{ codec_xor };
+    CacheDecrypt decrypt{codec_xor};
     bool decript_from_string = false;
     if (config.count(ov::cache_encryption_callbacks.name())) {
         auto encryption_callbacks = config.at(ov::cache_encryption_callbacks.name()).as<EncryptionCallbacks>();
@@ -578,7 +578,8 @@ std::shared_ptr<ov::ICompiledModel> Plugin::import_model(std::istream& model_stream,
         [this](const std::shared_ptr<ov::AlignedBuffer>& model, const std::shared_ptr<ov::AlignedBuffer>& weights) {
             return get_core()->read_model(model, weights);
         },
-        decrypt, decript_from_string);
+        decrypt,
+        decript_from_string);
 
     std::shared_ptr<ov::Model> model;
     deserializer >> model;
diff --git a/src/plugins/intel_cpu/src/plugin.h b/src/plugins/intel_cpu/src/plugin.h
index fad3f388a2385f..c7f1dee9fb52c6 100644
--- a/src/plugins/intel_cpu/src/plugin.h
+++ b/src/plugins/intel_cpu/src/plugin.h
@@ -20,8 +20,7 @@ class Plugin : public ov::IPlugin {
     std::shared_ptr<ov::ICompiledModel> compile_model(const std::shared_ptr<const ov::Model>& model,
                                                       const ov::AnyMap& properties,
                                                       const ov::SoPtr<ov::IRemoteContext>& context) const override {
-        OPENVINO_THROW_NOT_IMPLEMENTED(
-            "compile_model with RemoteContext is not supported by CPU plugin!");
+        OPENVINO_THROW_NOT_IMPLEMENTED("compile_model with RemoteContext is not supported by CPU plugin!");
     };
 
     void set_property(const ov::AnyMap& properties) override;
@@ -30,8 +29,7 @@ class Plugin : public ov::IPlugin {
     std::shared_ptr<ov::ICompiledModel> import_model(std::istream& model,
                                                      const ov::SoPtr<ov::IRemoteContext>& context,
                                                      const ov::AnyMap& properties) const override {
-        OPENVINO_THROW_NOT_IMPLEMENTED(
-            "import_model with RemoteContext is not supported by CPU plugin!");
+        OPENVINO_THROW_NOT_IMPLEMENTED("import_model with RemoteContext is not supported by CPU plugin!");
     };
 
     std::shared_ptr<ov::ICompiledModel> import_model(std::istream& model,
@@ -41,8 +39,7 @@ class Plugin : public ov::IPlugin {
                                                      std::shared_ptr<ov::AlignedBuffer> model_buffer,
                                                      const ov::SoPtr<ov::IRemoteContext>& context,
                                                      const ov::AnyMap& properties) const override {
-        OPENVINO_THROW_NOT_IMPLEMENTED(
-            "import_model with RemoteContext is not supported by CPU plugin!");
+        OPENVINO_THROW_NOT_IMPLEMENTED("import_model with RemoteContext is not supported by CPU plugin!");
     };
 
     ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
diff --git a/src/plugins/intel_cpu/src/utils/serialize.cpp b/src/plugins/intel_cpu/src/utils/serialize.cpp
index 33d8140fbe4a84..55b53116e4ac01 100644
--- a/src/plugins/intel_cpu/src/utils/serialize.cpp
+++ b/src/plugins/intel_cpu/src/utils/serialize.cpp
@@ -14,7 +14,8 @@ namespace intel_cpu {
 ////////// ModelSerializer //////////
 
 ModelSerializer::ModelSerializer(std::ostream& ostream, CacheEncrypt encrypt_fn)
-    : m_ostream(ostream), m_cache_encrypt(std::move(encrypt_fn)) {}
+    : m_ostream(ostream),
+      m_cache_encrypt(std::move(encrypt_fn)) {}
 
 void ModelSerializer::operator<<(const std::shared_ptr<ov::Model>& model) {
     auto serialize_info = [&](std::ostream& stream) {
@@ -35,22 +36,25 @@ ModelDeserializer::ModelDeserializer(std::istream& model_stream,
                                      ModelBuilder fn,
                                      const CacheDecrypt& decrypt_fn,
                                      bool decript_from_string)
-    : m_istream(model_stream), m_model_builder(std::move(fn)), m_decript_from_string(decript_from_string), m_model_buffer(model_buffer) {
-        if (m_decript_from_string) {
-            m_cache_decrypt.m_decrypt_str = decrypt_fn.m_decrypt_str;
-        } else {
-            m_cache_decrypt.m_decrypt_char = decrypt_fn.m_decrypt_char;
-        }
+    : m_istream(model_stream),
+      m_model_builder(std::move(fn)),
+      m_decript_from_string(decript_from_string),
+      m_model_buffer(model_buffer) {
+    if (m_decript_from_string) {
+        m_cache_decrypt.m_decrypt_str = decrypt_fn.m_decrypt_str;
+    } else {
+        m_cache_decrypt.m_decrypt_char = decrypt_fn.m_decrypt_char;
     }
+}
 
-    void ModelDeserializer::set_info(pugi::xml_node& root, std::shared_ptr<ov::Model>& model) {}
+void ModelDeserializer::set_info(pugi::xml_node& root, std::shared_ptr<ov::Model>& model) {}
 
-    void ModelDeserializer::operator>>(std::shared_ptr<ov::Model>& model) {
-        if (m_model_buffer) {
-            process_mmap(model, m_model_buffer);
-        } else {
-            process_stream(model);
-        }
+void ModelDeserializer::operator>>(std::shared_ptr<ov::Model>& model) {
+    if (m_model_buffer) {
+        process_mmap(model, m_model_buffer);
+    } else {
+        process_stream(model);
+    }
 }
 
 void ModelDeserializer::process_mmap(std::shared_ptr<ov::Model>& model,
@@ -77,7 +81,10 @@ void ModelDeserializer::process_mmap(std::shared_ptr<ov::Model>& model,
 
     // Read model input/output precisions.
     pugi::xml_document xml_in_out_doc;
     if (hdr.custom_data_size > 0lu) {
-        auto res = xml_in_out_doc.load_buffer(buffer_base + hdr.custom_data_offset, hdr.custom_data_size, pugi::parse_default, pugi::encoding_utf8);
+        auto res = xml_in_out_doc.load_buffer(buffer_base + hdr.custom_data_offset,
+                                              hdr.custom_data_size,
+                                              pugi::parse_default,
+                                              pugi::encoding_utf8);
         if (res.status != pugi::status_ok) {
             OPENVINO_THROW("[CPU] Could not deserialize custom data.");
         }
@@ -86,7 +93,10 @@ void ModelDeserializer::process_mmap(std::shared_ptr<ov::Model>& model,
     // Map blob content
     std::shared_ptr<ov::AlignedBuffer> weights_buf;
     if (hdr.consts_size) {
-        weights_buf = std::make_shared<ov::SharedBuffer<std::shared_ptr<ov::MappedMemory>>>(buffer_base + hdr.consts_offset, hdr.consts_size, mmemory);
+        weights_buf =
+            std::make_shared<ov::SharedBuffer<std::shared_ptr<ov::MappedMemory>>>(buffer_base + hdr.consts_offset,
+                                                                                  hdr.consts_size,
+                                                                                  mmemory);
     }
 
     // XML content
@@ -103,9 +113,7 @@ void ModelDeserializer::process_mmap(std::shared_ptr<ov::Model>& model,
         xml_buff->assign(buffer_base + hdr.model_offset, hdr.model_size);
     }
     std::shared_ptr<ov::AlignedBuffer> model_buf =
-        std::make_shared<ov::SharedBuffer<std::shared_ptr<std::string>>>(&((*xml_buff)[0]),
-                                                                         hdr.model_size,
-                                                                         xml_buff);
+        std::make_shared<ov::SharedBuffer<std::shared_ptr<std::string>>>(&((*xml_buff)[0]), hdr.model_size, xml_buff);
 
     model = m_model_builder(model_buf, weights_buf);
 
@@ -150,7 +158,7 @@ void ModelDeserializer::process_stream(std::shared_ptr<ov::Model>& model) {
     auto data_blob = std::make_shared<ov::Tensor>(ov::element::u8, ov::Shape({hdr.consts_size}));
     m_istream.seekg(hdr.consts_offset);
     if (hdr.consts_size) {
-            m_istream.read(static_cast<char*>(data_blob->data(ov::element::u8)), hdr.consts_size);
+        m_istream.read(static_cast<char*>(data_blob->data(ov::element::u8)), hdr.consts_size);
     }
 
     // read XML content
@@ -162,16 +170,20 @@ void ModelDeserializer::process_stream(std::shared_ptr<ov::Model>& model) {
         if (m_decript_from_string) {
             *xml_string = m_cache_decrypt.m_decrypt_str(*xml_string);
         } else {
-            m_cache_decrypt.m_decrypt_char(const_cast<char*>(xml_string->data()), xml_string->data(), xml_string->size());
+            m_cache_decrypt.m_decrypt_char(const_cast<char*>(xml_string->data()),
+                                           xml_string->data(),
+                                           xml_string->size());
         }
     }
 
-    auto model_buf = std::make_shared<ov::SharedBuffer<std::shared_ptr<std::string>>>(const_cast<char*>(xml_string->data()),
-                                                                                      xml_string->size(),
-                                                                                      xml_string);
-    auto weights_buf = std::make_shared<ov::SharedBuffer<std::shared_ptr<ov::Tensor>>>(reinterpret_cast<char*>(data_blob->data(ov::element::u8)),
-                                                                                       hdr.consts_size,
-                                                                                       data_blob);
+    auto model_buf =
+        std::make_shared<ov::SharedBuffer<std::shared_ptr<std::string>>>(const_cast<char*>(xml_string->data()),
+                                                                         xml_string->size(),
+                                                                         xml_string);
+    auto weights_buf = std::make_shared<ov::SharedBuffer<std::shared_ptr<ov::Tensor>>>(
+        reinterpret_cast<char*>(data_blob->data(ov::element::u8)),
+        hdr.consts_size,
+        data_blob);
 
     model = m_model_builder(model_buf, weights_buf);
 
@@ -180,5 +192,5 @@ void ModelDeserializer::process_stream(std::shared_ptr<ov::Model>& model) {
     set_info(root, model);
 }
 
-}   // namespace intel_cpu
-}   // namespace ov
+}  // namespace intel_cpu
+}  // namespace ov
diff --git a/src/plugins/intel_cpu/src/utils/serialize.hpp b/src/plugins/intel_cpu/src/utils/serialize.hpp
index 4dfdd6b22afbd4..0821b1160c38d7 100644
--- a/src/plugins/intel_cpu/src/utils/serialize.hpp
+++ b/src/plugins/intel_cpu/src/utils/serialize.hpp
@@ -29,7 +29,9 @@ class ModelSerializer {
 
 class ModelDeserializer {
 public:
-    typedef std::function<std::shared_ptr<ov::Model>(const std::shared_ptr<ov::AlignedBuffer>&, const std::shared_ptr<ov::AlignedBuffer>&)> ModelBuilder;
+    typedef std::function<std::shared_ptr<ov::Model>(const std::shared_ptr<ov::AlignedBuffer>&,
+                                                     const std::shared_ptr<ov::AlignedBuffer>&)>
+        ModelBuilder;
 
     ModelDeserializer(std::istream& model,
                       std::shared_ptr<ov::AlignedBuffer> model_buffer,
@@ -55,5 +57,5 @@ class ModelDeserializer {
     std::shared_ptr<ov::AlignedBuffer> m_model_buffer;
 };
 
-}   // namespace intel_cpu
-}   // namespace ov
+}  // namespace intel_cpu
+}  // namespace ov
diff --git a/src/plugins/intel_npu/src/common/include/intel_npu/common/igraph.hpp b/src/plugins/intel_npu/src/common/include/intel_npu/common/igraph.hpp
index 9d12180791e159..365cc35727cf4c 100644
--- a/src/plugins/intel_npu/src/common/include/intel_npu/common/igraph.hpp
+++ b/src/plugins/intel_npu/src/common/include/intel_npu/common/igraph.hpp
@@ -30,6 +30,8 @@ class BlobContainer {
     virtual bool release_from_memory() {
         OPENVINO_THROW("BlobContainer::release_from_memory() method is not implemented!");
     }
+
+    virtual ~BlobContainer() = default;
 };
 
 class BlobContainerVector : public BlobContainer {
@@ -147,9 +149,7 @@ class IGraph : public std::enable_shared_from_this<IGraph> {
     // first inference starts running
     std::mutex _mutex;
 
-<<<<<<< HEAD
-<<<<<<< HEAD
-    std::vector<uint8_t> _blob;
+    std::unique_ptr<BlobContainer> _blob;
 
     uint32_t _unique_id = 0;
     uint32_t _last_submitted_id;
@@ -161,12 +161,6 @@ class IGraph : public std::enable_shared_from_this<IGraph> {
     std::optional<std::size_t> _batch_size = std::nullopt;
 
     Logger _logger;
-=======
-    std::shared_ptr<std::vector<uint8_t>> _blob;
->>>>>>> 25b5c05976 (Keep `shared_ptr` of blob in IGraph to fix `export_model` for import scenario)
-=======
-    std::unique_ptr<BlobContainer> _blob;
->>>>>>> 94e33c4e24 (Add `BlobContainer` class and derivates for each `std::vector` and `std::shared_ptr` blob types)
 };
 
 }  // namespace intel_npu
diff --git a/src/plugins/intel_npu/src/common/include/intel_npu/common/npu.hpp b/src/plugins/intel_npu/src/common/include/intel_npu/common/npu.hpp
index b53482506360eb..b3846906644e58 100644
--- a/src/plugins/intel_npu/src/common/include/intel_npu/common/npu.hpp
+++ b/src/plugins/intel_npu/src/common/include/intel_npu/common/npu.hpp
@@ -62,9 +62,6 @@ class ICompilerAdapter {
     virtual ov::SupportedOpsMap query(const std::shared_ptr<const ov::Model>& model, const Config& config) const = 0;
 
     virtual ~ICompilerAdapter() = default;
-
-private:
-    ov::intel_npu::CompilerType _compilerType;
 };
 
 //------------------------------------------------------------------------------
diff --git a/src/plugins/intel_npu/src/compiler_adapter/src/driver_compiler_adapter.cpp b/src/plugins/intel_npu/src/compiler_adapter/src/driver_compiler_adapter.cpp
index 35bc4f3c31213f..37b1774bf17591 100644
--- a/src/plugins/intel_npu/src/compiler_adapter/src/driver_compiler_adapter.cpp
+++ b/src/plugins/intel_npu/src/compiler_adapter/src/driver_compiler_adapter.cpp
@@ -206,11 +206,13 @@ std::shared_ptr<IGraph> DriverCompilerAdapter::compile(const std::shared_ptr<const ov::Model>& model,
                                          config);
 }
 
-std::shared_ptr<IGraph> DriverCompilerAdapter::parse(std::unique_ptr<BlobContainer> blobPtr, const Config& config) const {
+std::shared_ptr<IGraph> DriverCompilerAdapter::parse(std::unique_ptr<BlobContainer> blobPtr,
+                                                     const Config& config) const {
     OV_ITT_TASK_CHAIN(PARSE_BLOB, itt::domains::NPUPlugin, "DriverCompilerAdapter", "parse");
 
     _logger.debug("parse start");
-    ze_graph_handle_t graphHandle = _zeGraphExt->getGraphHandle(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), blobPtr->size());
+    ze_graph_handle_t graphHandle =
+        _zeGraphExt->getGraphHandle(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), blobPtr->size());
     _logger.debug("parse end");
 
     OV_ITT_TASK_NEXT(PARSE_BLOB, "getNetworkMeta");
diff --git a/src/plugins/intel_npu/src/compiler_adapter/src/driver_graph.cpp b/src/plugins/intel_npu/src/compiler_adapter/src/driver_graph.cpp
index 458fa7762658c8..0019eb1bdf17d4 100644
--- a/src/plugins/intel_npu/src/compiler_adapter/src/driver_graph.cpp
+++ b/src/plugins/intel_npu/src/compiler_adapter/src/driver_graph.cpp
@@ -152,7 +152,7 @@ bool DriverGraph::release_blob(const Config& config) {
         return false;
     }
 
-    if(!_blob->release_from_memory()) {
+    if (!_blob->release_from_memory()) {
        return false;
     }
 
diff --git a/src/plugins/intel_npu/src/compiler_adapter/src/plugin_compiler_adapter.cpp b/src/plugins/intel_npu/src/compiler_adapter/src/plugin_compiler_adapter.cpp
index b7c3d0c75c8f4f..220af24b83c6c1 100644
--- a/src/plugins/intel_npu/src/compiler_adapter/src/plugin_compiler_adapter.cpp
+++ b/src/plugins/intel_npu/src/compiler_adapter/src/plugin_compiler_adapter.cpp
@@ -91,7 +91,8 @@ std::shared_ptr<IGraph> PluginCompilerAdapter::compile(const std::shared_ptr<const ov::Model>& model,
     try {
-        graphHandle = _zeGraphExt->getGraphHandle(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), blobPtr->size());
+        graphHandle =
+            _zeGraphExt->getGraphHandle(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), blobPtr->size());
     } catch (...) {
         _logger.info("Failed to obtain the level zero graph handle. Inference requests for this model are not "
                      "allowed. Only exports are available");
@@ -106,12 +107,14 @@ std::shared_ptr<IGraph> PluginCompilerAdapter::compile(const std::shared_ptr<const ov::Model>& model,
                                          config);
 }
 
-std::shared_ptr<IGraph> PluginCompilerAdapter::parse(std::unique_ptr<BlobContainer> blobPtr, const Config& config) const {
+std::shared_ptr<IGraph> PluginCompilerAdapter::parse(std::unique_ptr<BlobContainer> blobPtr,
+                                                     const Config& config) const {
     OV_ITT_TASK_CHAIN(PARSE_BLOB, itt::domains::NPUPlugin, "PluginCompilerAdapter", "parse");
 
     _logger.debug("parse start");
     std::vector<uint8_t> network(blobPtr->size());
-    network.assign(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()) + blobPtr->size());
+    network.assign(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()),
+                   reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()) + blobPtr->size());
     auto networkMeta = _compiler->parse(network, config);
     network.clear();
     network.shrink_to_fit();
@@ -120,7 +123,8 @@ std::shared_ptr<IGraph> PluginCompilerAdapter::parse(std::unique_ptr<BlobContainer> blobPtr,
     if (_zeGraphExt) {
-        graphHandle = _zeGraphExt->getGraphHandle(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), blobPtr->size());
+        graphHandle =
+            _zeGraphExt->getGraphHandle(reinterpret_cast<const uint8_t*>(blobPtr->get_ptr()), blobPtr->size());
     }
 
     return std::make_shared<PluginGraph>(_zeGraphExt,
diff --git a/src/plugins/intel_npu/src/compiler_adapter/src/plugin_graph.cpp b/src/plugins/intel_npu/src/compiler_adapter/src/plugin_graph.cpp
index 232d45a8333e8b..ce02e0caad8edd 100644
--- a/src/plugins/intel_npu/src/compiler_adapter/src/plugin_graph.cpp
+++ b/src/plugins/intel_npu/src/compiler_adapter/src/plugin_graph.cpp
@@ -17,7 +17,10 @@ PluginGraph::PluginGraph(const std::shared_ptr<ZeGraphExtWrappers>& zeGraphExt,
                          NetworkMetadata metadata,
                          std::unique_ptr<BlobContainer> blobPtr,
                          const Config& config)
-    : IGraph(graphHandle, std::move(metadata), config, std::optional<std::unique_ptr<BlobContainer>>(std::move(blobPtr))),
+    : IGraph(graphHandle,
+             std::move(metadata),
+             config,
+             std::optional<std::unique_ptr<BlobContainer>>(std::move(blobPtr))),
       _zeGraphExt(zeGraphExt),
       _zeroInitStruct(zeroInitStruct),
       _compiler(compiler),
@@ -40,7 +43,9 @@ void PluginGraph::export_blob(std::ostream& stream) const {
 
     if (_logger.level() >= ov::log::Level::INFO) {
         std::uint32_t result = 1171117u;
-        for (const uint8_t* it = reinterpret_cast<const uint8_t*>(_blob->get_ptr()); it != reinterpret_cast<const uint8_t*>(_blob->get_ptr()) + _blob->size(); ++it) {
+        for (const uint8_t* it = reinterpret_cast<const uint8_t*>(_blob->get_ptr());
+             it != reinterpret_cast<const uint8_t*>(_blob->get_ptr()) + _blob->size();
+             ++it) {
             result = ((result << 7) + result) + static_cast<uint32_t>(*it);
         }
 
@@ -54,7 +59,8 @@ void PluginGraph::export_blob(std::ostream& stream) const {
 std::vector<ov::ProfilingInfo> PluginGraph::process_profiling_output(const std::vector<uint8_t>& profData,
                                                                      const Config& config) const {
     std::vector<uint8_t> blob(_blob->size());
-    blob.assign(reinterpret_cast<const uint8_t*>(_blob->get_ptr()), reinterpret_cast<const uint8_t*>(_blob->get_ptr()) + _blob->size());
+    blob.assign(reinterpret_cast<const uint8_t*>(_blob->get_ptr()),
+                reinterpret_cast<const uint8_t*>(_blob->get_ptr()) + _blob->size());
     return _compiler->process_profiling_output(profData, blob, config);
 }
diff --git a/src/plugins/intel_npu/src/compiler_adapter/src/ze_graph_ext_wrappers.cpp b/src/plugins/intel_npu/src/compiler_adapter/src/ze_graph_ext_wrappers.cpp
index 2fb924ae71b525..5efb973351c55f 100644
--- a/src/plugins/intel_npu/src/compiler_adapter/src/ze_graph_ext_wrappers.cpp
+++ b/src/plugins/intel_npu/src/compiler_adapter/src/ze_graph_ext_wrappers.cpp
@@ -370,12 +370,8 @@ ze_graph_handle_t ZeGraphExtWrappers::getGraphHandle(const uint8_t* blobData, size_t blobSize) const {
         OPENVINO_THROW("Empty blob");
     }
 
-    ze_graph_desc_t desc = {ZE_STRUCTURE_TYPE_GRAPH_DESC_PROPERTIES,
-                            nullptr,
-                            ZE_GRAPH_FORMAT_NATIVE,
-                            blobSize,
-                            blobData,
-                            nullptr};
+    ze_graph_desc_t desc =
+        {ZE_STRUCTURE_TYPE_GRAPH_DESC_PROPERTIES, nullptr, ZE_GRAPH_FORMAT_NATIVE, blobSize, blobData, nullptr};
 
     _logger.debug("getGraphHandle - perform pfnCreate");
     auto result = _zeroInitStruct->getGraphDdiTable().pfnCreate(_zeroInitStruct->getContext(),
diff --git a/src/plugins/intel_npu/src/plugin/include/metrics.hpp b/src/plugins/intel_npu/src/plugin/include/metrics.hpp
index 9dc24908633c5a..e940439d2b3611 100644
--- a/src/plugins/intel_npu/src/plugin/include/metrics.hpp
+++ b/src/plugins/intel_npu/src/plugin/include/metrics.hpp
@@ -67,7 +67,8 @@ class Metrics final {
                                                          ov::intel_npu::batch_mode.name(),
                                                          ov::hint::execution_mode.name()};
 
-    const std::vector<std::string> _internalSupportedProperties = {ov::internal::caching_properties.name(), ov::internal::caching_with_mmap.name()};
+    const std::vector<std::string> _internalSupportedProperties = {ov::internal::caching_properties.name(),
+                                                                   ov::internal::caching_with_mmap.name()};
 
     // Metric to provide a hint for a range for number of async infer requests. (bottom bound, upper bound, step)
     const std::tuple<uint32_t, uint32_t, uint32_t> _rangeForAsyncInferRequests{1u, 10u, 1u};
diff --git a/src/plugins/intel_npu/src/plugin/src/plugin.cpp b/src/plugins/intel_npu/src/plugin/src/plugin.cpp
index 27fe10505a3ce7..785278322f1071 100644
--- a/src/plugins/intel_npu/src/plugin/src/plugin.cpp
+++ b/src/plugins/intel_npu/src/plugin/src/plugin.cpp
@@ -7,7 +7,6 @@
 #include
 
 #include "compiled_model.hpp"
-#include "npuw/compiled_model.hpp"
 #include "driver_compiler_adapter.hpp"
 #include "intel_npu/common/device_helpers.hpp"
 #include "intel_npu/common/igraph.hpp"
@@ -17,6 +16,7 @@
 #include "intel_npu/config/npuw.hpp"
 #include "intel_npu/config/runtime.hpp"
 #include "intel_npu/utils/zero/zero_init.hpp"
+#include "npuw/compiled_model.hpp"
 #include "openvino/op/constant.hpp"
 #include "openvino/op/parameter.hpp"
 #include "openvino/runtime/intel_npu/properties.hpp"
@@ -803,8 +803,8 @@ std::shared_ptr<ov::ICompiledModel> Plugin::import_model(std::istream& stream, const ov::AnyMap& properties) const {
 }
 
 std::shared_ptr<ov::ICompiledModel> Plugin::import_model(std::istream& /* unusedStream */,
-                                                        std::shared_ptr<ov::AlignedBuffer> model_buffer,
-                                                        const ov::AnyMap& properties) const {
+                                                         std::shared_ptr<ov::AlignedBuffer> model_buffer,
+                                                         const ov::AnyMap& properties) const {
     OV_ITT_SCOPED_TASK(itt::domains::NPUPlugin, "Plugin::import_model");
     OV_ITT_TASK_CHAIN(PLUGIN_IMPORT_MODEL, itt::domains::NPUPlugin, "Plugin::import_model", "merge_configs");
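
The shared_buffer.hpp hunk only re-aligns the parameters of seekoff, but the pattern it touches is worth spelling out: OwningSharedStreamBuffer overrides seekoff so that every seek performed on the stream is also mirrored into the shared object's offset. The sketch below is a minimal, self-contained streambuf over an external read-only buffer with a seekoff override; it is a simplified illustration only — BufferStreamBuf and its internals are hypothetical, not the actual OpenVINO class.

    #include <cstddef>
    #include <iostream>
    #include <streambuf>
    #include <string>

    // Minimal read-only streambuf over an external buffer, with seek support.
    class BufferStreamBuf : public std::streambuf {
    public:
        BufferStreamBuf(const char* data, std::size_t size) {
            char* p = const_cast<char*>(data);  // setg() wants char*; we never write
            setg(p, p, p + size);
        }

    protected:
        pos_type seekoff(off_type off,
                         std::ios_base::seekdir dir,
                         std::ios_base::openmode which = std::ios_base::in) override {
            if (!(which & std::ios_base::in))
                return pos_type(off_type(-1));
            char* base = eback();
            char* cur = (dir == std::ios_base::beg)   ? base + off
                        : (dir == std::ios_base::cur) ? gptr() + off
                                                      : egptr() + off;
            if (cur < base || cur > egptr())
                return pos_type(off_type(-1));
            setg(base, cur, egptr());  // a real owning buffer would also publish cur - base
            return pos_type(cur - base);
        }
    };

    int main() {
        const char text[] = "blob-header|payload";
        BufferStreamBuf buf(text, sizeof(text) - 1);
        std::istream is(&buf);
        is.seekg(12, std::ios_base::beg);  // jump past the separator, as a deserializer would
        std::string payload;
        is >> payload;
        std::cout << payload << '\n';  // prints "payload"
    }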
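
In serialize.cpp, both process_mmap and process_stream start from a header (hdr) whose custom_data_*, consts_* and model_* fields partition the cached blob into three sections. The layout below is a hypothetical sketch inferred purely from the field names used in this diff; the real struct lives in the CPU plugin's serialization code and may differ.

    #include <cstdint>

    // Hypothetical layout, inferred from the hdr.* accesses in
    // ModelDeserializer::process_mmap; not the actual OpenVINO struct.
    struct ModelBlobHeader {
        std::uint64_t custom_data_offset;  // runtime info XML (I/O precisions)
        std::uint64_t custom_data_size;
        std::uint64_t consts_offset;       // weights, usable straight from the mapping
        std::uint64_t consts_size;
        std::uint64_t model_offset;        // model XML, decrypted if a codec is set
        std::uint64_t model_size;
    };

    // Each section is the half-open byte range [offset, offset + size) of the blob.
    const std::uint8_t* section_begin(const std::uint8_t* blob_base, std::uint64_t offset) {
        return blob_base + offset;
    }

    int main() {
        static_assert(sizeof(ModelBlobHeader) == 6 * sizeof(std::uint64_t), "packed layout");
        return 0;
    }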
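
The reason process_mmap wraps each section in an ov::SharedBuffer is lifetime: the raw pointer into the mapped file must not outlive its owner (the MappedMemory, the xml_buff string, or the data_blob tensor). std::shared_ptr's aliasing constructor expresses the same idea in standard C++; this is a simplified sketch of the ownership trick, not the actual ov::SharedBuffer implementation.

    #include <cstddef>
    #include <cstdio>
    #include <memory>
    #include <string>

    // A view into a sub-range of a buffer that shares ownership of the whole
    // buffer, so the viewed bytes cannot outlive their owner.
    std::shared_ptr<const char> make_view(const std::shared_ptr<std::string>& owner, std::size_t offset) {
        // Aliasing constructor: reuses owner's control block, but the stored
        // pointer points into owner's storage.
        return std::shared_ptr<const char>(owner, owner->data() + offset);
    }

    int main() {
        auto xml = std::make_shared<std::string>("header<net/>");
        auto view = make_view(xml, 6);  // points at "<net/>"
        xml.reset();                    // the string stays alive through `view`
        std::printf("%s\n", view.get());
    }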
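
The CPU import path builds a CacheDecrypt that defaults to codec_xor and can be overridden through ov::cache_encryption_callbacks; the two callback fields used by ModelDeserializer (m_decrypt_str and m_decrypt_char) correspond to a string-in/string-out codec and an in-place buffer codec. Below is a hedged sketch of a symmetric XOR codec in both shapes — the key and function names are hypothetical, and the real codec_xor key lives inside the CPU plugin.

    #include <cstddef>
    #include <string>

    // Hypothetical fixed key for illustration only.
    static const char kKey[] = {0x30, 0x5A, 0x1F, 0x7C};

    // Buffer shape: dst and src may alias, matching the in-place
    // m_decrypt_char(const_cast<char*>(p), p, n) call in ModelDeserializer.
    void xor_codec(char* dst, const char* src, std::size_t size) {
        for (std::size_t i = 0; i < size; ++i) {
            dst[i] = src[i] ^ kKey[i % sizeof(kKey)];
        }
    }

    // String shape, matching m_decrypt_str.
    std::string xor_codec(const std::string& in) {
        std::string out(in.size(), '\0');
        xor_codec(&out[0], in.data(), in.size());
        return out;
    }

    int main() {
        std::string xml = "<net version=\"11\"/>";
        // XOR is symmetric: applying the codec twice restores the input.
        return xor_codec(xor_codec(xml)) == xml ? 0 : 1;
    }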
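
The igraph.hpp hunk adds a defaulted virtual destructor to BlobContainer, and the std::unique_ptr<BlobContainer> _blob member is exactly why: destroying a derived container through a base-class pointer without a virtual destructor is undefined behavior. A minimal sketch of the hazard and the fix, using hypothetical stand-in names rather than the actual NPU classes:

    #include <cstddef>
    #include <cstdint>
    #include <memory>
    #include <vector>

    // Base interface; the defaulted virtual destructor makes deletion
    // through a base-class pointer well defined.
    struct Container {
        virtual const void* get_ptr() const = 0;
        virtual std::size_t size() const = 0;
        virtual ~Container() = default;
    };

    // Derived container owning its bytes in a std::vector, analogous to a
    // vector-backed blob container.
    struct VectorContainer : Container {
        explicit VectorContainer(std::vector<std::uint8_t> data) : m_data(std::move(data)) {}
        const void* get_ptr() const override { return m_data.data(); }
        std::size_t size() const override { return m_data.size(); }

    private:
        std::vector<std::uint8_t> m_data;
    };

    int main() {
        // Without Container::~Container being virtual, this unique_ptr's
        // destructor would invoke undefined behavior.
        std::unique_ptr<Container> blob =
            std::make_unique<VectorContainer>(std::vector<std::uint8_t>{1, 2, 3});
        return static_cast<int>(blob->size()) - 3;
    }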
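
Finally, the loop the plugin_graph.cpp hunk re-wraps in PluginGraph::export_blob is a multiplicative rolling checksum: (result << 7) + result is a shift-and-add form of result * 129, seeded with 1171117u, used only for an INFO-level log of the exported blob. A standalone version of the same arithmetic:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Same rolling hash as in PluginGraph::export_blob:
    // result = result * 129 + byte, seeded with 1171117u.
    static std::uint32_t blob_checksum(const std::uint8_t* data, std::size_t size) {
        std::uint32_t result = 1171117u;
        for (const std::uint8_t* it = data; it != data + size; ++it) {
            result = ((result << 7) + result) + static_cast<std::uint32_t>(*it);
        }
        return result;
    }

    int main() {
        const std::vector<std::uint8_t> blob = {0xDE, 0xAD, 0xBE, 0xEF};
        std::printf("hash: %u\n", static_cast<unsigned>(blob_checksum(blob.data(), blob.size())));
    }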