/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "nnbackend.h"

#include <new>
#include "common/log.h"
#include "common/utils.h"
#include "nncompiler.h"
#include "nnexecutor.h"
#include "nntensor.h"
#include "tensor_desc.h"
#include "device.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
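// NNBackend adapts a concrete Device implementation to the backend interface:
// it forwards queries to the device and creates the compiler, executor, and
// tensor objects that operate on it.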
NNBackend::NNBackend(const std::shared_ptr<Device>& device, size_t backendID)
    : m_device(device),
      m_backendID(backendID) {}

NNBackend::~NNBackend()
{
    m_device = nullptr;
}

size_t NNBackend::GetBackendID() const
{
    return m_backendID;
}

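// Composes the backend name as a unique string built from the device name,
// vendor name, and version reported by the underlying device.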
OH_NN_ReturnCode NNBackend::GetBackendName(std::string& backendName) const
{
    if (m_device == nullptr) {
        LOGE("[NNBackend] GetBackendName failed, m_device is nullptr.");
        return OH_NN_FAILED;
    }

    std::string deviceName;
    OH_NN_ReturnCode ret = m_device->GetDeviceName(deviceName);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] GetBackendName failed, failed to get device name.");
        return ret;
    }

    std::string vendorName;
    ret = m_device->GetVendorName(vendorName);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] GetBackendName failed, failed to get vendor name.");
        return ret;
    }

    std::string version;
    ret = m_device->GetVersion(version);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] GetBackendName failed, failed to get version.");
        return ret;
    }

    backendName = GenUniqueName(deviceName, vendorName, version);
    return OH_NN_SUCCESS;
}

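// Reports the type of the underlying device (e.g. CPU, GPU, or a dedicated
// accelerator).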
OH_NN_ReturnCode NNBackend::GetBackendType(OH_NN_DeviceType& backendType) const
{
    if (m_device == nullptr) {
        LOGE("[NNBackend] GetBackendType failed, m_device is nullptr.");
        return OH_NN_FAILED;
    }

    OH_NN_ReturnCode ret = m_device->GetDeviceType(backendType);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] GetBackendType failed, failed to get device type.");
        return ret;
    }

    return OH_NN_SUCCESS;
}

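// Queries the current availability status of the underlying device.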
OH_NN_ReturnCode NNBackend::GetBackendStatus(DeviceStatus& status) const
{
    if (m_device == nullptr) {
        LOGE("[NNBackend] GetBackendStatus failed, m_device is nullptr.");
        return OH_NN_FAILED;
    }

    OH_NN_ReturnCode ret = m_device->GetDeviceStatus(status);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] GetBackendStatus failed, failed to get device status.");
        return ret;
    }

    return OH_NN_SUCCESS;
}

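// Creates an NNCompiler for the given compilation. Only NN models and NN model
// caches are supported; offline models are rejected. The caller owns the
// returned compiler and must release it with DestroyCompiler.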
Compiler* NNBackend::CreateCompiler(Compilation* compilation)
{
    if (compilation == nullptr) {
        LOGE("[NNBackend] CreateCompiler failed, compilation is nullptr.");
        return nullptr;
    }

    // Only building a compiler from an nnModel or an nnModel cache is supported.
    if ((compilation->offlineModelPath != nullptr) ||
        ((compilation->offlineModelBuffer.first != nullptr) ||
         (compilation->offlineModelBuffer.second != static_cast<size_t>(0)))) {
        LOGE("[NNBackend] CreateCompiler failed, only building NN model and NN model cache is supported.");
        return nullptr;
    }

    // If nnModel is null, build an empty compiler and compile the model from the
    // cache later; otherwise build the compiler from the given model.
    NNCompiler* nnCompiler = nullptr;
    if (compilation->nnModel == nullptr) {
        nnCompiler = new (std::nothrow) NNCompiler(m_device, m_backendID);
    } else {
        nnCompiler = new (std::nothrow) NNCompiler(compilation->nnModel, m_device, m_backendID);
    }

    if (nnCompiler == nullptr) {
        LOGE("[NNBackend] CreateCompiler failed, error happened when allocating NN Compiler.");
        return nullptr;
    }

    return reinterpret_cast<Compiler*>(nnCompiler);
}

OH_NN_ReturnCode NNBackend::DestroyCompiler(Compiler* compiler)
{
    if (compiler == nullptr) {
        LOGE("[NNBackend] DestroyCompiler failed, compiler is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    delete compiler;
    return OH_NN_SUCCESS;
}

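// Creates an NNExecutor from a compilation that already holds a compiler built
// by CreateCompiler. The caller owns the returned executor and must release it
// with DestroyExecutor.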
Executor* NNBackend::CreateExecutor(Compilation* compilation)
{
    if (compilation == nullptr) {
        LOGE("[NNBackend] CreateExecutor failed, compilation is nullptr.");
        return nullptr;
    }

    if (compilation->compiler == nullptr) {
        LOGE("[NNBackend] CreateExecutor failed, the compiler in compilation is nullptr, create compiler first.");
        return nullptr;
    }

    NNCompiler* nnCompiler = reinterpret_cast<NNCompiler*>(compilation->compiler);
    NNExecutor* nnExecutor = nnCompiler->CreateExecutor();
    if (nnExecutor == nullptr) {
        LOGE("[NNBackend] CreateExecutor failed, failed to create NN Executor.");
        return nullptr;
    }

    return reinterpret_cast<Executor*>(nnExecutor);
}

OH_NN_ReturnCode NNBackend::DestroyExecutor(Executor* executor)
{
    if (executor == nullptr) {
        LOGE("[NNBackend] DestroyExecutor failed, executor is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    delete executor;
    return OH_NN_SUCCESS;
}

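// Allocates an NNTensor2_0 bound to this backend and initializes it from the
// given tensor descriptor. The caller owns the returned tensor and must
// release it with DestroyTensor.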
Tensor* NNBackend::CreateTensor(TensorDesc* desc)
{
    if (desc == nullptr) {
        LOGE("[NNBackend] CreateTensor failed, tensor desc is nullptr.");
        return nullptr;
    }

    NNTensor2_0* tensorImpl = new (std::nothrow) NNTensor2_0(m_backendID);
    if (tensorImpl == nullptr) {
        LOGE("[NNBackend] CreateTensor failed, error happened when allocating NN Tensor.");
        return nullptr;
    }

    auto ret = tensorImpl->SetTensorDesc(desc);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] CreateTensor failed, error happened when setting tensor desc.");
        delete tensorImpl;
        return nullptr;
    }

    return reinterpret_cast<Tensor*>(tensorImpl);
}

OH_NN_ReturnCode NNBackend::DestroyTensor(Tensor* tensor)
{
    if (tensor == nullptr) {
        LOGE("[NNBackend] DestroyTensor failed, tensor is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    delete tensor;
    return OH_NN_SUCCESS;
}

std::shared_ptr<Device> NNBackend::GetDevice() const
{
    if (m_device == nullptr) {
        LOGE("[NNBackend] GetDevice failed, m_device is nullptr.");
    }
    return m_device;
}

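// Asks the underlying device which nodes of the LiteGraph it supports; each
// entry of ops reports whether the node at the same index can run on this
// backend.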
OH_NN_ReturnCode NNBackend::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                                  std::vector<bool>& ops)
{
    if (model == nullptr) {
        LOGE("[NNBackend] GetSupportedOperation failed, model is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (m_device == nullptr) {
        LOGE("[NNBackend] GetSupportedOperation failed, m_device is nullptr.");
        return OH_NN_FAILED;
    }

    OH_NN_ReturnCode ret = m_device->GetSupportedOperation(model, ops);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNBackend] GetSupportedOperation failed, failed to get supported ops from device.");
        return OH_NN_FAILED;
    }

    return OH_NN_SUCCESS;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS