/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interfaces/innerkits/c/neural_network_runtime_inner.h"
#include "interfaces/kits/c/neural_network_runtime/neural_network_runtime.h"

#include "compilation.h"
#include "executor.h"
#include "inner_model.h"
#include "common/log.h"
#include "quant_param.h"
#include "validation.h"
#include "syspara/parameter.h"
#include "securec.h"

#include <cstring>
#include <fstream>
#include <filesystem>
#include <sys/stat.h>
#include <unistd.h>

using namespace OHOS::NeuralNetworkRuntime;

#define NNRT_API __attribute__((visibility("default")))

const std::string EXTENSION_KEY_QUANT_BUFFER = "QuantBuffer";
const std::string EXTENSION_KEY_MODEL_NAME = "ModelName";
const std::string EXTENSION_KEY_IS_PROFILING = "isProfiling";
const std::string EXTENSION_KEY_OP_LAYOUT = "opLayout";
const std::string EXTENSION_KEY_INPUT_DIMS = "InputDims";
const std::string EXTENSION_KEY_DYNAMIC_DIMS = "DynamicDims";
const std::string EXTENSION_KEY_FM_SHARED = "NPU_FM_SHARED";

const std::string NULL_HARDWARE_NAME = "default";
const std::string HARDWARE_NAME = "const.ai.nnrt_deivce";
const std::string HARDWARE_VERSION = "v5_0";
constexpr size_t HARDWARE_NAME_MAX_LENGTH = 128;
constexpr size_t FILE_NUMBER_MAX = 100; // Limit the number of cache files to at most 100

NNRT_API NN_QuantParam *OH_NNQuantParam_Create()
{
    auto* quantParamImpl = new (std::nothrow) QuantParams();
    if (quantParamImpl == nullptr) {
        LOGE("OH_NNQuantParam_Create failed, please check whether it has enough memory.");
        return nullptr;
    }

    return (NN_QuantParam*)(quantParamImpl);
}

NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetScales(NN_QuantParam* quantParams, const double* scales, size_t quantNum)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (scales == nullptr) {
        LOGE("OH_NNQuantParam_SetScales failed, passed nullptr to scales.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantNum == 0) {
        LOGE("OH_NNQuantParam_SetScales failed, passed 0 to quantNum.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
    std::vector<double> scaleVector(scales, scales + quantNum);
    quantParamImpl->SetScales(scaleVector);

    return OH_NN_SUCCESS;
}

NNRT_API OH_NN_ReturnCode OH_NNQuantParam_SetZeroPoints(NN_QuantParam* quantParams,
                                                        const int32_t* zeroPoints,
                                                        size_t quantNum)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (zeroPoints == nullptr) {
        LOGE("OH_NNQuantParam_SetZeroPoints failed, passed nullptr to zeroPoints.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantNum == 0) {
        LOGE("OH_NNQuantParam_SetZeroPoints failed, passed 0 to quantNum.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
    std::vector<int32_t> zeroPointVector(zeroPoints, zeroPoints + quantNum);
    quantParamImpl->SetZeroPoints(zeroPointVector);

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode OH_NNQuantParam_SetNumBits(NN_QuantParam* quantParams, const uint32_t* numBits, size_t quantNum)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (numBits == nullptr) {
        LOGE("OH_NNQuantParam_SetNumBits failed, passed nullptr to numBits.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantNum == 0) {
        LOGE("OH_NNQuantParam_SetNumBits failed, passed 0 to quantNum.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(quantParams);
    std::vector<uint32_t> numBitVector(numBits, numBits + quantNum);
    quantParamImpl->SetNumBits(numBitVector);

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode OH_NNQuantParam_Destroy(NN_QuantParam** quantParams)
{
    if (quantParams == nullptr) {
        LOGE("OH_NNQuantParam_Destroy failed, passed nullptr to quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (*quantParams == nullptr) {
        LOGW("OH_NNQuantParam_Destroy failed, passed nullptr to *quantParams.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* quantParamImpl = reinterpret_cast<QuantParams*>(*quantParams);
    delete quantParamImpl;
    *quantParams = nullptr;

    return OH_NN_SUCCESS;
}
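
// Illustrative usage sketch for the NN_QuantParam helpers above (not part of this
// implementation; the per-channel sizes and tensorIndex are placeholder values):
//
//     NN_QuantParam* quantParam = OH_NNQuantParam_Create();
//     const double scales[2] = {0.05, 0.1};
//     const int32_t zeroPoints[2] = {0, 0};
//     const uint32_t numBits[2] = {8, 8};
//     OH_NNQuantParam_SetScales(quantParam, scales, 2);
//     OH_NNQuantParam_SetZeroPoints(quantParam, zeroPoints, 2);
//     OH_NNQuantParam_SetNumBits(quantParam, numBits, 2);
//     OH_NNModel_SetTensorQuantParams(model, tensorIndex, quantParam);
//     OH_NNQuantParam_Destroy(&quantParam);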

OH_NN_ReturnCode OH_NNModel_AddTensorToModel(OH_NNModel* model, const NN_TensorDesc* tensorDesc)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensorDesc == nullptr) {
        LOGE("OH_NNModel_AddTensorToModel failed, passed nullptr to tensorDesc.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
    OH_NN_ReturnCode returnCode = innerModel->AddTensorDesc(tensorDesc);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_AddTensorToModel failed, error happened when adding tensor to model.");
    }

    return returnCode;
}

OH_NN_ReturnCode OH_NNModel_SetTensorQuantParams(OH_NNModel* model, uint32_t index, NN_QuantParam* quantParam)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (quantParam == nullptr) {
        LOGE("OH_NNModel_SetTensorQuantParams failed, passed nullptr to quantParam.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
    OH_NN_ReturnCode returnCode = innerModel->SetTensorQuantParam((uint32_t)(index), quantParam);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_SetTensorQuantParams failed, error happened when setting tensor quantParam.");
    }

    return returnCode;
}

OH_NN_ReturnCode OH_NNModel_SetTensorType(OH_NNModel* model, uint32_t index, OH_NN_TensorType tensorType)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetTensorType failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!Validation::ValidateTensorType(tensorType)) {
        LOGE("OH_NNModel_SetTensorType failed, invalid tensor type.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto* innerModel = reinterpret_cast<OHOS::NeuralNetworkRuntime::InnerModel*>(model);
    OH_NN_ReturnCode returnCode = innerModel->SetTensorType((uint32_t)(index), tensorType);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_SetTensorType failed, error happened when setting tensor type.");
    }

    return returnCode;
}
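
// Illustrative sketch of adding and configuring a tensor (assumes the NN_TensorDesc
// helpers declared elsewhere in the NNRT C API; the shape and index 0 are placeholders):
//
//     NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
//     int32_t shape[4] = {1, 3, 224, 224};
//     OH_NNTensorDesc_SetDataType(tensorDesc, OH_NN_FLOAT32);
//     OH_NNTensorDesc_SetShape(tensorDesc, shape, 4);
//     OH_NNModel_AddTensorToModel(model, tensorDesc);
//     OH_NNModel_SetTensorType(model, 0, OH_NN_TENSOR);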

NNRT_API OH_NNModel *OH_NNModel_Construct(void)
{
    InnerModel *innerModel = new(std::nothrow) InnerModel();
    if (innerModel == nullptr) {
        LOGE("OH_NNModel_Construct failed, please check whether it has enough memory.");
        return nullptr;
    }

    OH_NNModel *nnModel = reinterpret_cast<OH_NNModel*>(innerModel);
    return nnModel;
}

NNRT_API OH_NN_ReturnCode OH_NNModel_AddOperation(OH_NNModel *model,
                                                  OH_NN_OperationType op,
                                                  const OH_NN_UInt32Array *paramIndices,
                                                  const OH_NN_UInt32Array *inputIndices,
                                                  const OH_NN_UInt32Array *outputIndices)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (paramIndices == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to paramIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (inputIndices == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to inputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (outputIndices == nullptr) {
        LOGE("OH_NNModel_AddOperation failed, passed nullptr to outputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->AddOperation(op, *paramIndices, *inputIndices, *outputIndices);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_SetTensorData(OH_NNModel *model,
                                                   uint32_t index,
                                                   const void *dataBuffer,
                                                   size_t length)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetTensorData failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (dataBuffer == nullptr) {
        LOGE("OH_NNModel_SetTensorData failed, passed nullptr to dataBuffer, which has no effect.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (length == 0) {
        LOGE("OH_NNModel_SetTensorData failed, passed dataBuffer with length 0, which has no effect.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->SetTensorValue(index, dataBuffer, length);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_SpecifyInputsAndOutputs(OH_NNModel *model,
                                                             const OH_NN_UInt32Array *inputIndices,
                                                             const OH_NN_UInt32Array *outputIndices)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (inputIndices == nullptr) {
        LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to inputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (outputIndices == nullptr) {
        LOGE("OH_NNModel_SpecifyInputsAndOutputs failed, passed nullptr to outputIndices.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->SpecifyInputsAndOutputs(*inputIndices, *outputIndices);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_Finish(OH_NNModel *model)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_Finish failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->Build();
}
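
// Illustrative end-to-end construction flow for the model-building APIs above (a sketch
// only; the tensor indices, activation parameter and OH_NN_OPS_ADD are placeholder choices):
//
//     OH_NNModel* model = OH_NNModel_Construct();
//     // ... add input, parameter and output tensors via OH_NNModel_AddTensorToModel ...
//     int8_t activation = 0;
//     OH_NNModel_SetTensorData(model, 2, &activation, sizeof(activation));
//     uint32_t paramIdx[] = {2};
//     uint32_t inputIdx[] = {0, 1};
//     uint32_t outputIdx[] = {3};
//     OH_NN_UInt32Array params = {paramIdx, 1};
//     OH_NN_UInt32Array inputs = {inputIdx, 2};
//     OH_NN_UInt32Array outputs = {outputIdx, 1};
//     OH_NNModel_AddOperation(model, OH_NN_OPS_ADD, &params, &inputs, &outputs);
//     OH_NNModel_SpecifyInputsAndOutputs(model, &inputs, &outputs);
//     OH_NNModel_Finish(model);
//     // ... compile and run the model, then release it:
//     OH_NNModel_Destroy(&model);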

OH_NN_ReturnCode ParseInputDimsFromExtensions(char* data, size_t dataSize, const mindspore::lite::LiteGraph* liteGraph,
    ExtensionConfig& extensionConfig, size_t& dynamicCount)
{
    extensionConfig.inputDims.clear();
    int32_t* dimsValue = reinterpret_cast<int32_t*>(data);
    size_t allDimsSize = dataSize / sizeof(int32_t);

    size_t inputCount = liteGraph->input_indices_.size(); // number of inputs of the LiteGraph
    size_t allTensorSize = liteGraph->all_tensors_.size(); // total number of tensors in the LiteGraph
    std::vector<int32_t> inputDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < inputCount; ++i) {
        inputDim.clear();
        if (liteGraph->input_indices_[i] >= allTensorSize) {
            LOGE("ParseInputDimsFromExtensions failed, index of input %u is out of range.",
                liteGraph->input_indices_[i]);
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Get the dimensions of the current input
        mindspore::lite::TensorPtr tensor = liteGraph->all_tensors_[liteGraph->input_indices_[i]];
        auto tensorDims = mindspore::lite::MindIR_Tensor_GetDims(tensor);
        size_t inputDimSize = tensorDims.size();
        if (allDimsSize < inputDimSize) {
            LOGE("ParseInputDimsFromExtensions failed, dataSize is invalid.");
            extensionConfig.inputDims.clear();
            return OH_NN_INVALID_PARAMETER;
        }
        // Read the dim values of the current input from the extension
        for (size_t j = 0; j < inputDimSize; ++j) {
            inputDim.emplace_back(dimsValue[dataIndex]);
            if (dimsValue[dataIndex] == -1) {
                ++dynamicCount;
            }
            ++dataIndex;
        }
        extensionConfig.inputDims.emplace_back(inputDim);
        allDimsSize -= inputDimSize;
    }
    // allDimsSize should be consistent with the model; after the loop it must be 0
    if (allDimsSize != 0) {
        LOGE("ParseInputDimsFromExtensions failed, allDimsSize is not equal to liteGraph.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    return OH_NN_SUCCESS;
}
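
// Worked example (illustrative): for a LiteGraph with two inputs whose dims are
// [1, 3, -1, -1] and [1, 10], the "InputDims" extension carries the flattened int32
// buffer {1, 3, -1, -1, 1, 10} with dataSize = 6 * sizeof(int32_t). The loop above
// then produces inputDims = {{1, 3, -1, -1}, {1, 10}} and dynamicCount = 2, one count
// per -1 (dynamic) dimension.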

OH_NN_ReturnCode ParseDynamicDimsFromExtensions(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    const mindspore::lite::LiteGraph* liteGraph, ExtensionConfig& extensionConfig)
{
    const std::vector<std::pair<char*, size_t>>& inputDims = extensionMap.at(EXTENSION_KEY_INPUT_DIMS);
    if (inputDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, input dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    auto dynamicDims = extensionMap.at(EXTENSION_KEY_DYNAMIC_DIMS);
    if (dynamicDims.empty()) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dims is empty.");
        return OH_NN_INVALID_PARAMETER;
    }
    if (inputDims[0].first == nullptr || inputDims[0].second == 0 ||
        dynamicDims[0].first == nullptr || dynamicDims[0].second == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, data or dataSize is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }

    size_t dynamicCount = 0;
    auto returnCode = ParseInputDimsFromExtensions(
        inputDims[0].first, inputDims[0].second, liteGraph, extensionConfig, dynamicCount);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("ParseDynamicDimsFromExtensions failed, failed to get input dims from extensions.");
        return returnCode;
    }
    if (dynamicCount == 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic count is 0.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }

    extensionConfig.dynamicDims.clear();
    int32_t* dynamicDimsValue = reinterpret_cast<int32_t*>(dynamicDims[0].first);
    size_t dynamicDimsSize = dynamicDims[0].second / sizeof(int32_t);
    if ((dynamicDimsSize % dynamicCount) != 0) {
        LOGE("ParseDynamicDimsFromExtensions failed, dynamic dataSize is invalid.");
        extensionConfig.inputDims.clear();
        return OH_NN_INVALID_PARAMETER;
    }
    size_t dynamicSize = dynamicDimsSize / dynamicCount;
    std::vector<int32_t> dynamicDim;
    size_t dataIndex = 0;
    for (size_t i = 0; i < dynamicSize; ++i) {
        dynamicDim.clear();
        for (size_t j = 0; j < dynamicCount; ++j) {
            dynamicDim.emplace_back(dynamicDimsValue[dataIndex]);
            ++dataIndex;
        }
        extensionConfig.dynamicDims.emplace_back(dynamicDim);
    }

    return OH_NN_SUCCESS;
}
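
// Worked example (illustrative), continuing the case above with dynamicCount = 2:
// a "DynamicDims" buffer of {224, 224, 320, 320} gives dynamicDimsSize = 4, which
// splits into dynamicDimsSize / dynamicCount = 2 groups, so dynamicDims becomes
// {{224, 224}, {320, 320}} -- one group of concrete values per supported dynamic shape.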

OH_NN_ReturnCode CheckExtensionConfigs(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    ExtensionConfig& extensionConfig)
{
    if (extensionMap.find(EXTENSION_KEY_QUANT_BUFFER) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_QUANT_BUFFER);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty quant buffer value.");
            return OH_NN_INVALID_PARAMETER;
        }
        extensionConfig.quantBuffer.data = value[0].first;
        extensionConfig.quantBuffer.length = value[0].second;
    }
    if (extensionMap.find(EXTENSION_KEY_MODEL_NAME) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_MODEL_NAME);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty model name value.");
            return OH_NN_INVALID_PARAMETER;
        }
        extensionConfig.modelName.assign(value[0].first, value[0].first + value[0].second);
    }
    if (extensionMap.find(EXTENSION_KEY_IS_PROFILING) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_IS_PROFILING);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty isProfiling value.");
            return OH_NN_INVALID_PARAMETER;
        }
        extensionConfig.isProfiling.assign(value[0].first, value[0].first + value[0].second);
    }
    if (extensionMap.find(EXTENSION_KEY_OP_LAYOUT) != extensionMap.end()) {
        const std::vector<std::pair<char*, size_t>>& value = extensionMap.at(EXTENSION_KEY_OP_LAYOUT);
        if (value.empty()) {
            LOGE("ParseExtensionConfigs failed, get empty op layout value.");
            return OH_NN_INVALID_PARAMETER;
        }
        std::string ops;
        for (auto singleValue : value) {
            ops.assign(singleValue.first, singleValue.first + singleValue.second);
            extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
            LOGI("ParseExtensionConfigs opLayout:%{public}s.", ops.c_str());
        }
    }
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode ParseExtensionConfigs(
    const std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>>& extensionMap,
    const mindspore::lite::LiteGraph* pLiteGraph, ExtensionConfig& extensionConfig)
{
    extensionConfig.tuningStrategy = TuningStrategy::ON_DEVICE_PREPROCESS_TUNING;
    OH_NN_ReturnCode ret = CheckExtensionConfigs(extensionMap, extensionConfig);
    if (ret != OH_NN_SUCCESS) {
        LOGE("CheckExtensionConfigs failed.");
        return ret;
    }
    if (extensionMap.find(EXTENSION_KEY_INPUT_DIMS) != extensionMap.end() &&
        extensionMap.find(EXTENSION_KEY_DYNAMIC_DIMS) != extensionMap.end()) {
        auto returnCode = ParseDynamicDimsFromExtensions(extensionMap, pLiteGraph, extensionConfig);
        if (returnCode != OH_NN_SUCCESS) {
            LOGE("ParseExtensionConfigs failed, parse dynamic dims from extensions failed.");
            return returnCode;
        }
        extensionConfig.tuningStrategy = TuningStrategy::OFF; // tiered dynamic shapes do not support fftl
    }
    if (extensionMap.find(EXTENSION_KEY_FM_SHARED) != extensionMap.end()) {
        extensionConfig.isNpuFmShared = true;
        LOGI("NNRT enable fm shared success.");
    }
    return OH_NN_SUCCESS;
}

NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromLiteGraph(OH_NNModel *model, const void *liteGraph,
    const OH_NN_Extension *extensions, size_t extensionSize)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (liteGraph == nullptr) {
        LOGE("OH_NNModel_BuildFromLiteGraph failed, passed nullptr to liteGraph.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto *pLiteGraph = reinterpret_cast<const mindspore::lite::LiteGraph*>(liteGraph);
    ExtensionConfig extensionConfig;
    std::unordered_map<std::string, std::vector<std::pair<char*, size_t>>> extensionMap;
    for (size_t i = 0; i < extensionSize; ++i) {
        std::string name = extensions[i].name;
        if (extensionMap.find(name) == extensionMap.end()) {
            extensionMap.insert({name, {{extensions[i].value, extensions[i].valueSize}}});
        } else {
            extensionMap[name].push_back({extensions[i].value, extensions[i].valueSize});
        }
    }
    auto returnCode = ParseExtensionConfigs(extensionMap, pLiteGraph, extensionConfig);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_BuildFromLiteGraph failed, parse extension configs failed.");
        return returnCode;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);

    // Once the innerModel is built from the liteGraph successfully, the innerModel
    // owns the liteGraph, in which case the invoker must not delete the liteGraph.
    // Otherwise, the invoker still has ownership of the liteGraph.
    return innerModel->BuildFromLiteGraph(pLiteGraph, extensionConfig);
}
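
// Illustrative sketch of how the extensions recognised above could be supplied by a
// caller (the buffers, model name and shapes are assumed placeholder values):
//
//     int32_t inputDims[] = {1, 3, -1, -1};      // one input with two dynamic axes
//     int32_t dynamicDims[] = {224, 320};        // one gear: 224 x 320
//     OH_NN_Extension extensions[] = {
//         {"ModelName", const_cast<char*>("demo"), 4},
//         {"InputDims", reinterpret_cast<char*>(inputDims), sizeof(inputDims)},
//         {"DynamicDims", reinterpret_cast<char*>(dynamicDims), sizeof(dynamicDims)},
//     };
//     OH_NNModel_BuildFromLiteGraph(model, liteGraph, extensions, 3);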

namespace {
OH_NN_ReturnCode CheckCacheFile(const std::string& cacheInfoPath, int64_t& fileNumber, int64_t& cacheVersion)
{
    // read number of cache models
    char path[PATH_MAX];
    if (realpath(cacheInfoPath.c_str(), path) == nullptr) {
        LOGE("OH_NNModel_HasCache get real path of cache info failed.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (access(path, F_OK) != 0) {
        LOGE("OH_NNModel_HasCache access cache info file failed.");
        return OH_NN_INVALID_PARAMETER;
    }

    std::ifstream ifs(path, std::ios::in | std::ios::binary);
    if (!ifs) {
        LOGE("OH_NNModel_HasCache open cache info file failed.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!ifs.read(reinterpret_cast<char*>(&(fileNumber)), sizeof(fileNumber))) {
        LOGI("OH_NNModel_HasCache read cache info file failed.");
        ifs.close();
        return OH_NN_INVALID_PARAMETER;
    }

    if (!ifs.read(reinterpret_cast<char*>(&(cacheVersion)), sizeof(cacheVersion))) {
        LOGI("OH_NNModel_HasCache read cache info file failed.");
        ifs.close();
        return OH_NN_INVALID_PARAMETER;
    }

    ifs.close();
    return OH_NN_SUCCESS;
}
}
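
// Layout of the cache info file as consumed by CheckCacheFile above: the first
// int64_t holds the number of cache model files and the second int64_t holds the
// cache version; any remaining content of the file is not read here.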

NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName, uint32_t version)
{
    if (cacheDir == nullptr) {
        LOGI("OH_NNModel_HasCache get empty cache directory.");
        return false;
    }

    if (modelName == nullptr) {
        LOGI("OH_NNModel_HasCache get empty model name.");
        return false;
    }

    std::string cacheInfoPath = std::string(cacheDir) + "/" + std::string(modelName) + "cache_info.nncache";

    // determine whether cache info file exists
    struct stat buffer;
    bool exist = (stat(cacheInfoPath.c_str(), &buffer) == 0);
    if (!exist) {
        return false;
    }

    int64_t fileNumber{0};
    int64_t cacheVersion{0};
    OH_NN_ReturnCode returnCode = CheckCacheFile(cacheInfoPath, fileNumber, cacheVersion);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("OH_NNModel_HasCache failed to get fileNumber or cacheVersion.");
        return false;
    }

    if (fileNumber <= 0 || fileNumber > FILE_NUMBER_MAX) {
        LOGE("OH_NNModel_HasCache fileNumber is invalid or exceeds 100.");
        std::filesystem::remove_all(cacheInfoPath);
        return false;
    }

    // determine whether cache model files exist
    for (int64_t i = 0; i < fileNumber; ++i) {
        std::string cacheModelPath =
            std::string(cacheDir) + "/" + std::string(modelName) + std::to_string(i) + ".nncache";
        exist = (exist && (stat(cacheModelPath.c_str(), &buffer) == 0));
        if (!exist) {
            LOGE("OH_NNModel_HasCache cacheModelPath does not exist.");
            std::filesystem::remove_all(cacheInfoPath);
            return false;
        }
    }

    if (cacheVersion != version) {
        LOGE("OH_NNModel_HasCache version does not match.");
        exist = false;
    }

    return exist;
}
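
// Expected cache layout checked above (illustrative), e.g. for cacheDir "/data/cache",
// modelName "demo" and fileNumber = 2 -- note that modelName is used as a plain
// file-name prefix, without a separator:
//     /data/cache/democache_info.nncache   (fileNumber and cacheVersion header)
//     /data/cache/demo0.nncache
//     /data/cache/demo1.nncache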

NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromMetaGraph(OH_NNModel *model, const void *metaGraph,
    const OH_NN_Extension *extensions, size_t extensionSize)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (metaGraph == nullptr) {
        LOGE("OH_NNModel_BuildFromMetaGraph failed, passed nullptr to metaGraph.");
        return OH_NN_INVALID_PARAMETER;
    }

    ExtensionConfig extensionConfig;
    std::string ops;
    for (size_t i = 0; i < extensionSize; ++i) {
        std::string name = extensions[i].name;
        if (name == "QuantBuffer") {
            extensionConfig.quantBuffer.data = extensions[i].value;
            extensionConfig.quantBuffer.length = extensions[i].valueSize;
        } else if (name == "ModelName") {
            extensionConfig.modelName.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
        } else if (name == "Profiling") {
            extensionConfig.isProfiling.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
            LOGI("OH_NNModel_BuildFromMetaGraph isProfiling enable.");
        } else if (name == "opLayout") {
            ops.assign(extensions[i].value, extensions[i].value + extensions[i].valueSize);
            extensionConfig.opLayout.insert({ops, "hiai::ExecuteDevice::CPU"});
            LOGI("OH_NNModel_BuildFromMetaGraph opLayout:%{public}s.", ops.c_str());
        }
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->BuildFromMetaGraph(metaGraph, extensionConfig);
}

NNRT_API OH_NN_ReturnCode OH_NNModel_SetInputsAndOutputsInfo(OH_NNModel *model, const OH_NN_TensorInfo *inputsInfo,
    size_t inputSize, const OH_NN_TensorInfo *outputsInfo, size_t outputSize)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if ((inputsInfo == nullptr) || (inputSize == 0)) {
        LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, inputsInfo is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if ((outputsInfo == nullptr) || (outputSize == 0)) {
        LOGE("OH_NNModel_SetInputsAndOutputsInfo failed, outputsInfo is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->SetInputsAndOutputsInfo(inputsInfo, inputSize, outputsInfo, outputSize);
}

NNRT_API void OH_NNModel_Destroy(OH_NNModel **model)
{
    if (model == nullptr) {
        LOGW("OH_NNModel_Destroy has no effect, passed nullptr to model.");
        return;
    }

    if (*model == nullptr) {
        LOGW("OH_NNModel_Destroy has no effect, passed nullptr to *model.");
        return;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(*model);
    delete innerModel;
    *model = nullptr;
}

NNRT_API OH_NN_ReturnCode OH_NNModel_GetAvailableOperations(OH_NNModel *model,
                                                            size_t deviceID,
                                                            const bool **isAvailable,
                                                            uint32_t *opCount)
{
    if (model == nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to model.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (isAvailable == nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to isAvailable.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (*isAvailable != nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, *isAvailable is not nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (opCount == nullptr) {
        LOGE("OH_NNModel_GetAvailableOperations failed, passed nullptr to opCount.");
        return OH_NN_INVALID_PARAMETER;
    }

    InnerModel *innerModel = reinterpret_cast<InnerModel*>(model);
    return innerModel->GetSupportedOperations(deviceID, isAvailable, *opCount);
}
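
// Illustrative usage sketch (deviceID is assumed to come from a device-query API such
// as OH_NNDevice_GetAllDevicesID, which is not defined in this file):
//
//     const bool* isAvailable = nullptr;
//     uint32_t opCount = 0;
//     OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, deviceID, &isAvailable, &opCount);
//     // On success, isAvailable[i] indicates whether the i-th operation is supported;
//     // the array is managed by the model and is not freed by the caller.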

NNRT_API OH_NN_ReturnCode OH_NN_GetDeviceID(char *nnrtDevice, size_t len)
{
    if (nnrtDevice == nullptr || len == 0) {
        LOGE("nnrtDevice is nullptr or len is 0.");
        return OH_NN_INVALID_PARAMETER;
    }

    char cName[HARDWARE_NAME_MAX_LENGTH] = {0};
    int ret = GetParameter(HARDWARE_NAME.c_str(), NULL_HARDWARE_NAME.c_str(), cName, HARDWARE_NAME_MAX_LENGTH);
    // On success, the return value is the number of bytes of the hardware name
    if (ret <= 0) {
        LOGE("GetNNRtDeviceName failed, failed to get parameter.");
        return OH_NN_FAILED;
    }

    std::string deviceName = (std::string)cName + "_" + HARDWARE_VERSION;
    auto secureRet = strcpy_s(nnrtDevice, len, deviceName.c_str());
    if (secureRet != EOK) {
        LOGE("GetNNRtDeviceName failed, failed to get name.");
        return OH_NN_FAILED;
    }
    return OH_NN_SUCCESS;
}
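
// Illustrative usage sketch for OH_NN_GetDeviceID (the buffer size is a placeholder;
// it only needs to hold the "<hardware name>_v5_0" string):
//
//     char device[128] = {0};
//     if (OH_NN_GetDeviceID(device, sizeof(device)) == OH_NN_SUCCESS) {
//         // device now holds e.g. "<hardware name>_v5_0"
//     }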