/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

16 #include "common/utils.h"
17 #include "hdi_device_v2_0.h"
18 #include "nn_tensor.h"
19 #include "test/unittest/common/v2_0/mock_idevice.h"
20
// Shared test knob: the return code the mock HDI entry points will report next.
// Each mock that consumes an expectation resets this back to
// OH_NN_OPERATION_FORBIDDEN so later test cases are unaffected.
OH_NN_ReturnCode OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
22
23 namespace OHOS {
24 namespace NeuralNetworkRuntime {
IsModelCacheSupported(bool & isSupported)25 OH_NN_ReturnCode HDIDeviceV2_0::IsModelCacheSupported(bool& isSupported)
26 {
27 // isSupported is false when expecting to return success
28 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
29 // In order not to affect other use cases, set to the OH_NN_OPERATION_FORBIDDEN
30 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
31 isSupported = false;
32 return OH_NN_SUCCESS;
33 }
34
35 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
36 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
37 isSupported = false;
38 return OH_NN_FAILED;
39 }
40
41 isSupported = true;
42 return OH_NN_SUCCESS;
43 }
44
GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,std::vector<bool> & ops)45 OH_NN_ReturnCode HDIDeviceV2_0::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
46 std::vector<bool>& ops)
47 {
48 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_FILE) {
49 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
50 ops.emplace_back(true);
51 return OH_NN_SUCCESS;
52 }
53
54 if (model == nullptr) {
55 LOGE("HDIDeviceV2_0 mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
56 return OH_NN_NULL_PTR;
57 }
58
59 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
60 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
61 ops.emplace_back(false);
62 return OH_NN_SUCCESS;
63 }
64
65 ops.emplace_back(true);
66 return OH_NN_SUCCESS;
67 }
68
IsDynamicInputSupported(bool & isSupported)69 OH_NN_ReturnCode HDIDeviceV2_0::IsDynamicInputSupported(bool& isSupported)
70 {
71 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
72 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
73 isSupported = false;
74 return OH_NN_FAILED;
75 }
76
77 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_PATH) {
78 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
79 isSupported = false;
80 return OH_NN_SUCCESS;
81 }
82
83 isSupported = true;
84 return OH_NN_SUCCESS;
85 }
86
IsPerformanceModeSupported(bool & isSupported)87 OH_NN_ReturnCode HDIDeviceV2_0::IsPerformanceModeSupported(bool& isSupported)
88 {
89 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
90 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
91 isSupported = false;
92 return OH_NN_FAILED;
93 }
94
95 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
96 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
97 isSupported = false;
98 return OH_NN_SUCCESS;
99 }
100
101 isSupported = true;
102 return OH_NN_SUCCESS;
103 }
104
IsPrioritySupported(bool & isSupported)105 OH_NN_ReturnCode HDIDeviceV2_0::IsPrioritySupported(bool& isSupported)
106 {
107 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_PARAMETER) {
108 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
109 isSupported = false;
110 return OH_NN_INVALID_PARAMETER;
111 }
112
113 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
114 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
115 isSupported = false;
116 return OH_NN_SUCCESS;
117 }
118
119 isSupported = true;
120 return OH_NN_SUCCESS;
121 }
122
IsFloat16PrecisionSupported(bool & isSupported)123 OH_NN_ReturnCode HDIDeviceV2_0::IsFloat16PrecisionSupported(bool& isSupported)
124 {
125 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
126 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
127 isSupported = false;
128 return OH_NN_SUCCESS;
129 }
130
131 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_MEMORY_ERROR) {
132 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
133 isSupported = false;
134 return OH_NN_MEMORY_ERROR;
135 }
136
137 isSupported = true;
138 return OH_NN_SUCCESS;
139 }
140
PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,const ModelConfig & config,std::shared_ptr<PreparedModel> & preparedModel)141 OH_NN_ReturnCode HDIDeviceV2_0::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
142 const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel)
143 {
144 if (model == nullptr) {
145 LOGE("HDIDeviceV2_0 mock PrepareModel failed, the model is nullptr");
146 return OH_NN_INVALID_PARAMETER;
147 }
148
149 if (config.enableFloat16 == false) {
150 LOGE("HDIDeviceV2_0 mock PrepareModel failed, the enableFloat16 is false");
151 return OH_NN_FAILED;
152 }
153
154 sptr<OHOS::HDI::Nnrt::V2_0::IPreparedModel> hdiPreparedModel = sptr<OHOS::HDI::Nnrt::V2_0
155 ::MockIPreparedModel>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIPreparedModel());
156 if (hdiPreparedModel == nullptr) {
157 LOGE("HDIDeviceV2_0 mock PrepareModel failed, error happened when new sptr");
158 return OH_NN_NULL_PTR;
159 }
160
161 preparedModel = CreateSharedPtr<HDIPreparedModelV2_0>(hdiPreparedModel);
162 return OH_NN_SUCCESS;
163 }
164
ExportModelCache(std::vector<Buffer> & modelCache)165 OH_NN_ReturnCode HDIPreparedModelV2_0::ExportModelCache(std::vector<Buffer>& modelCache)
166 {
167 if (!modelCache.empty()) {
168 LOGE("HDIPreparedModelV2_0 mock ExportModelCache failed, the modelCache is not empty");
169 return OH_NN_INVALID_PARAMETER;
170 }
171
172 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
173 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
174 return OH_NN_FAILED;
175 }
176
177 int bufferSize = 13;
178 Buffer buffer;
179 std::string aBuffer = "mock_buffer_a";
180 buffer.data = const_cast<void*>(static_cast<const void*>(aBuffer.c_str()));
181 buffer.length = bufferSize;
182 modelCache.emplace_back(buffer);
183
184 Buffer buffer2;
185 std::string bBuffer = "mock_buffer_b";
186 buffer2.data = const_cast<void*>(static_cast<const void*>(bBuffer.c_str()));
187 buffer2.length = bufferSize;
188 modelCache.emplace_back(buffer2);
189
190 return OH_NN_SUCCESS;
191 }
192
AllocateBuffer(size_t length)193 void* HDIDeviceV2_0::AllocateBuffer(size_t length)
194 {
195 if (length == 0) {
196 LOGE("HDIDeviceV2_0 mock AllocateBuffer failed, the length param is invalid");
197 return nullptr;
198 }
199
200 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_NULL_PTR) {
201 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
202 return nullptr;
203 }
204
205 void* buffer = malloc(length);
206 if (buffer == nullptr) {
207 LOGE("HDIDeviceV2_0 mock AllocateBuffer failed, the buffer is nullptr");
208 return nullptr;
209 }
210 return buffer;
211 }
212
ReleaseBuffer(const void * buffer)213 OH_NN_ReturnCode HDIDeviceV2_0::ReleaseBuffer(const void* buffer)
214 {
215 if (buffer == nullptr) {
216 LOGE("HDIDeviceV2_0 mock ReleaseBuffer failed, the buffer is nullptr");
217 return OH_NN_NULL_PTR;
218 }
219
220 free(const_cast<void *>(buffer));
221 buffer = nullptr;
222 return OH_NN_SUCCESS;
223 }
224
PrepareModelFromModelCache(const std::vector<Buffer> & modelCache,const ModelConfig & config,std::shared_ptr<PreparedModel> & preparedModel,bool & isUpdatable)225 OH_NN_ReturnCode HDIDeviceV2_0::PrepareModelFromModelCache(const std::vector<Buffer>& modelCache,
226 const ModelConfig& config, std::shared_ptr<PreparedModel>& preparedModel, bool& isUpdatable)
227 {
228 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
229 HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
230 return OH_NN_FAILED;
231 }
232
233 if (modelCache.size() == 0 || config.enableFloat16 == false) {
234 LOGE("HDIDeviceV2_0 mock PrepareModel failed, the modelCache size equals 0 or enableFloat16 is false");
235 return OH_NN_FAILED;
236 }
237
238 sptr<OHOS::HDI::Nnrt::V2_0::IPreparedModel> hdiPreparedModel = sptr<OHOS::HDI::Nnrt::V2_0
239 ::MockIPreparedModel>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIPreparedModel());
240 if (hdiPreparedModel == nullptr) {
241 LOGE("HDIDeviceV2_0 mock PrepareModelFromModelCache failed, error happened when new sptr");
242 return OH_NN_NULL_PTR;
243 }
244
245 preparedModel = CreateSharedPtr<HDIPreparedModelV2_0>(hdiPreparedModel);
246
247 return OH_NN_SUCCESS;
248 }
249
IsDynamicShape() const250 bool NNTensor::IsDynamicShape() const
251 {
252 if (HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
253 return false;
254 }
255
256 return true;
257 }
258 } // namespace NeuralNetworkRuntime
259 } // namespace OHOS