Skip to content

Commit 939b35a

Browse files
authored
[NPU]Change NPUBackend log (#27073)
### Details: The [log.error of NPUBackend](https://github.com/openvinotoolkit/openvino/blob/master/src/plugins/intel_npu/src/plugin/src/backends.cpp#L130) will confuse the user, even though the NPU backend does not impact the compilation and only impacts the inference stage. Now, [in the inference stage in the compiled-model part](https://github.com/openvinotoolkit/openvino/pull/27073/files#diff-74bc81bb7b258118f04e81468e3ec3b05e65e714546d32246bae45eb892f6abcR125-R130), a log.error output will be produced when no NPU device is detected. ### Tickets: - 153439
1 parent 4bf52c1 commit 939b35a

File tree

2 files changed

+13
-3
lines changed

2 files changed

+13
-3
lines changed

src/plugins/intel_npu/src/plugin/src/backends.cpp

+3-2
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ NPUBackends::NPUBackends(const std::vector<AvailableBackends>& backendRegistry,
111111
} catch (const std::exception& ex) {
112112
_logger.warning("Got an error during backend '%s' loading : %s", backendName.c_str(), ex.what());
113113
} catch (...) {
114-
_logger.error("Got an unknown error during backend '%s' loading", backendName.c_str());
114+
_logger.warning("Got an unknown error during backend '%s' loading", backendName.c_str());
115115
}
116116
}
117117

@@ -127,7 +127,8 @@ NPUBackends::NPUBackends(const std::vector<AvailableBackends>& backendRegistry,
127127
if (_backend != nullptr) {
128128
_logger.info("Use '%s' backend for inference", _backend->getName().c_str());
129129
} else {
130-
_logger.error("Cannot find backend for inference. Make sure the device is available.");
130+
_logger.warning("None of the backends were initialized successfully."
131+
"Only offline compilation can be done!");
131132
}
132133
}
133134

src/plugins/intel_npu/src/plugin/src/compiled_model.cpp

+10-1
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@
2424
namespace {
2525

2626
constexpr std::string_view NO_EXECUTOR_FOR_INFERENCE =
27+
"Can't create infer request due to create executor failed! Only exports can be made.";
28+
29+
constexpr std::string_view NO_EXECUTOR_FOR_INFERENCE_NODEVICE =
2730
"Can't create infer request!\n"
2831
"Please make sure that the device is available. Only exports can be made.";
2932

@@ -118,8 +121,14 @@ std::shared_ptr<ov::IAsyncInferRequest> CompiledModel::create_infer_request() co
118121
if (_executorPtr == nullptr && _device != nullptr) {
119122
_executorPtr = _device->createExecutor(_networkPtr, _config);
120123
}
124+
121125
if (_executorPtr == nullptr) {
122-
OPENVINO_THROW(NO_EXECUTOR_FOR_INFERENCE);
126+
if (_device != nullptr) {
127+
OPENVINO_THROW(NO_EXECUTOR_FOR_INFERENCE);
128+
} else {
129+
_logger.error("Can not find device!");
130+
OPENVINO_THROW(NO_EXECUTOR_FOR_INFERENCE_NODEVICE);
131+
}
123132
}
124133

125134
const std::shared_ptr<SyncInferRequest>& syncInferRequest =

0 commit comments

Comments (0)