7
7
*/
8
8
9
9
#include <cstring>
#include <iostream>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>

#include <openvino/openvino.hpp>
14
14
@@ -39,143 +39,159 @@ namespace backends {
39
39
namespace openvino {
40
40
41
41
OpenvinoBackend::OpenvinoBackend() {
  // Fail fast: the backend object is only useful when the OpenVINO runtime
  // can be loaded and reports at least one device.
  if (is_available()) {
    // ET_LOG(Info, "OpenVINO runtime successfully verified and initialized.");
    return;
  }
  // ET_LOG(Error, "OpenVINO runtime is not available. Initialization
  // failed.");
  throw std::runtime_error("OpenVINO runtime not available");
}
bool OpenvinoBackend::is_available () const {
51
- try {
52
- // Create an OpenVINO Core object to verify runtime availability
53
- ov::Core core;
54
-
55
- // Check if at least one device is available
56
- auto devices = core.get_available_devices ();
57
- if (!devices.empty ()) {
58
- return true ; // OpenVINO is available
59
- }
60
- } catch (const std::exception& e) {
61
- // Log the exception if OpenVINO runtime is not available
62
- ET_LOG (Error, " OpenVINO is not available: %s" , e.what ());
63
- } catch (...) {
64
- // Handle any unexpected errors
65
- ET_LOG (Error, " OpenVINO availability check failed due to an unknown error." );
66
- }
52
+ try {
53
+ // Create an OpenVINO Core object to verify runtime availability
54
+ ov::Core core;
67
55
68
- return false ; // OpenVINO is not available
56
+ // Check if at least one device is available
57
+ auto devices = core.get_available_devices ();
58
+ if (!devices.empty ()) {
59
+ return true ; // OpenVINO is available
60
+ }
61
+ } catch (const std::exception& e) {
62
+ // Log the exception if OpenVINO runtime is not available
63
+ ET_LOG (Error, " OpenVINO is not available: %s" , e.what ());
64
+ } catch (...) {
65
+ // Handle any unexpected errors
66
+ ET_LOG (
67
+ Error, " OpenVINO availability check failed due to an unknown error." );
68
+ }
69
+
70
+ return false ; // OpenVINO is not available
69
71
}
70
72
71
73
// Deserializes the pre-compiled OpenVINO model held in `processed`, imports
// it onto the requested device, and returns an ExecutionHandle that owns the
// compiled model and an infer request for it. Errors from allocation are
// propagated via the ET_ALLOCATE_* macro; OpenVINO failures throw.
Result<DelegateHandle*> OpenvinoBackend::init(
    BackendInitContext& context,
    FreeableBuffer* processed,
    ArrayRef<CompileSpec> compile_specs) const {
  ET_LOG(Info, "OpenvinoBackend::init %p", processed->data());

  ov::Core core;
  const char* data_ptr = static_cast<const char*>(processed->data());
  size_t data_size = processed->size();

  // Copy the serialized blob into a string and wrap it in a stream, since
  // ov::Core::import_model() consumes an std::istream.
  std::string data_string(data_ptr, data_size);
  std::istringstream compiled_stream(data_string);

  // Default to CPU; override with the device value, if provided in compile
  // specs. NOTE(review): assumes the "device" spec buffer holds a
  // NUL-terminated string -- confirm against the AOT exporter.
  const char* device = "CPU";
  for (auto& compile_spec : compile_specs) {
    if (std::strcmp(compile_spec.key, "device") == 0) {
      device = static_cast<const char*>(compile_spec.value.buffer);
    }
  }

  // Import the model onto the selected device.
  auto compiled_model = core.import_model(compiled_stream, device);

  // Allocate an infer request for this compiled model.
  std::shared_ptr<ov::InferRequest> infer_request =
      std::make_shared<ov::InferRequest>(compiled_model.create_infer_request());

  // Allocate the execution handle from the runtime allocator; the macro
  // returns an Error from this function if allocation fails.
  MemoryAllocator* allocator = context.get_runtime_allocator();
  ExecutionHandle* handle =
      ET_ALLOCATE_INSTANCE_OR_RETURN_ERROR(allocator, ExecutionHandle);
  handle->compiled_model = std::make_shared<ov::CompiledModel>(compiled_model);
  handle->infer_request = infer_request;

  return handle;
}
Error OpenvinoBackend::execute (
104
114
BackendExecutionContext& context,
105
115
DelegateHandle* input_handle,
106
116
EValue** args) const {
117
+ ExecutionHandle* execution_handle = (ExecutionHandle*)input_handle;
107
118
108
- ExecutionHandle* execution_handle = (ExecutionHandle*)input_handle ;
119
+ auto infer_request = execution_handle-> infer_request ;
109
120
110
- auto infer_request = execution_handle->infer_request ;
121
+ size_t num_inputs = infer_request->get_compiled_model ().inputs ().size ();
122
+ size_t num_outputs = infer_request->get_compiled_model ().outputs ().size ();
111
123
112
- size_t num_inputs = infer_request->get_compiled_model ().inputs ().size ();
113
- size_t num_outputs = infer_request->get_compiled_model ().outputs ().size ();
124
+ // Set inputs
125
+ for (size_t i = 0 ; i < num_inputs; i++) {
126
+ auto input_tensor = args[i]->toTensor ();
127
+ ov::Shape input_shape (
128
+ input_tensor.sizes ().begin (), input_tensor.sizes ().end ());
114
129
115
- // Set inputs
116
- for (size_t i = 0 ; i < num_inputs; i++) {
117
- auto input_tensor = args[i]->toTensor ();
118
- ov::Shape input_shape (input_tensor.sizes ().begin (), input_tensor.sizes ().end ());
130
+ // Convert input tensor to OpenVINO tensor
131
+ ov::element::Type ov_type =
132
+ convert_to_openvino_type (input_tensor.scalar_type ());
133
+ ov::Tensor ov_input_tensor (
134
+ ov_type, input_shape, input_tensor.mutable_data_ptr ());
119
135
120
- // Convert input tensor to OpenVINO tensor
121
- ov::element::Type ov_type = convert_to_openvino_type (input_tensor.scalar_type ());
122
- ov::Tensor ov_input_tensor (ov_type, input_shape, input_tensor.mutable_data_ptr ());
136
+ infer_request->set_input_tensor (i, ov_input_tensor);
137
+ }
123
138
124
- infer_request->set_input_tensor (i, ov_input_tensor);
125
- }
139
+ // Set outputs
140
+ for (size_t i = 0 ; i < num_outputs; i++) {
141
+ auto output_tensor = args[num_inputs + i]->toTensor ();
142
+ ov::Shape output_shape (
143
+ output_tensor.sizes ().begin (), output_tensor.sizes ().end ());
126
144
127
- // Set outputs
128
- for (size_t i = 0 ; i < num_outputs; i++) {
129
- auto output_tensor = args[num_inputs+i]->toTensor ();
130
- ov::Shape output_shape (output_tensor.sizes ().begin (), output_tensor.sizes ().end ());
145
+ // Convert input tensor to OpenVINO tensor
146
+ ov::element::Type ov_type =
147
+ convert_to_openvino_type (output_tensor.scalar_type ());
148
+ ov::Tensor ov_output_tensor (
149
+ ov_type, output_shape, output_tensor.mutable_data_ptr ());
131
150
132
- // Convert input tensor to OpenVINO tensor
133
- ov::element::Type ov_type = convert_to_openvino_type (output_tensor.scalar_type ());
134
- ov::Tensor ov_output_tensor (ov_type, output_shape, output_tensor.mutable_data_ptr ());
135
-
136
- infer_request->set_output_tensor (i, ov_output_tensor);
137
- }
151
+ infer_request->set_output_tensor (i, ov_output_tensor);
152
+ }
138
153
139
- // Execute the inference
140
- infer_request->infer ();
154
+ // Execute the inference
155
+ infer_request->infer ();
141
156
142
- return Error::Ok;
157
+ return Error::Ok;
143
158
}
144
159
145
160
void OpenvinoBackend::destroy(DelegateHandle* handle) const {
  // A null handle is tolerated: log and bail out.
  if (handle == nullptr) {
    ET_LOG(Info, "Attempted to destroy a null handle.");
    return;
  }

  // Recover the concrete handle type and release the OpenVINO objects it
  // owns, most-recently-created first.
  auto* execution_handle = static_cast<ExecutionHandle*>(handle);

  if (execution_handle->infer_request) {
    execution_handle->infer_request.reset();
    ET_LOG(Info, "Infer request successfully destroyed.");
  }

  if (execution_handle->compiled_model) {
    execution_handle->compiled_model.reset();
    ET_LOG(Info, "Compiled model successfully destroyed.");
  }

  ET_LOG(Info, "Delegate handle destroyed successfully.");
}
- ov::element::Type OpenvinoBackend::convert_to_openvino_type (ScalarType scalar_type) const {
169
- switch (scalar_type) {
170
- case ScalarType::Float:
171
- return ov::element::f32 ;
172
- case ScalarType::Int:
173
- return ov::element::i32 ;
174
- case ScalarType::Char:
175
- return ov::element::i8 ;
176
- default :
177
- throw std::runtime_error (" Unsupported scalar type" );
178
- }
183
+ ov::element::Type OpenvinoBackend::convert_to_openvino_type (
184
+ ScalarType scalar_type) const {
185
+ switch (scalar_type) {
186
+ case ScalarType::Float:
187
+ return ov::element::f32 ;
188
+ case ScalarType::Int:
189
+ return ov::element::i32 ;
190
+ case ScalarType::Char:
191
+ return ov::element::i8 ;
192
+ default :
193
+ throw std::runtime_error (" Unsupported scalar type" );
194
+ }
179
195
}
180
196
181
197
} // namespace openvino
@@ -185,7 +201,5 @@ ov::element::Type OpenvinoBackend::convert_to_openvino_type(ScalarType scalar_ty
185
201
namespace {
186
202
auto backend = executorch::backends::openvino::OpenvinoBackend();
187
203
executorch::runtime::Backend backend_id{" OpenvinoBackend" , &backend};
188
- static auto registered = executorch::runtime::register_backend(backend_id);
204
+ static auto registered = executorch::runtime::register_backend(backend_id);
189
205
} // namespace
190
-
191
-
0 commit comments