1 /*
2 * Copyright (c) 2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "nnrt_implementation.h"
17
18 #include <sys/mman.h>
19 #include <sys/stat.h>
20 #include <unistd.h>
21 #include <iostream>
22
23 #include <algorithm>
24 #include <string>
25
26 namespace tflite {
// Resolves a symbol from `handle` into `*nnrtFunction`; the name is checked for
// nullptr here, and on any failure the target function pointer is left unchanged.
28 template<class T>
LoadFunction(void * handle,const char * name,T * nnrtFunction)29 void LoadFunction(void* handle, const char* name, T* nnrtFunction)
30 {
31 if (name == nullptr) {
32 NNRT_LOG("nnrt error: the function %s does not exist.", name);
33 return;
34 }
35
36 void* fn = dlsym(handle, name);
37 if (fn == nullptr) {
38 NNRT_LOG("nnrt error: unable to open function %s", name);
39 return;
40 }
41
42 *nnrtFunction = reinterpret_cast<T>(fn);
43 return;
44 }
45
LoadNnrt()46 const NnrtApi LoadNnrt()
47 {
48 NnrtApi nnrt;
49 nnrt.nnrtExists = false;
50 void* libNeuralNetworks = nullptr;
51
52 // Assumes there can be multiple instances of NN API
53 static std::string nnrtLibraryName = "libneural_network_runtime.z.so";
54 libNeuralNetworks = dlopen(nnrtLibraryName.c_str(), RTLD_LAZY | RTLD_NODELETE);
55 if (libNeuralNetworks == nullptr) {
56 NNRT_LOG("nnrt error: unable to open library %s", nnrtLibraryName.c_str());
57 return nnrt;
58 } else {
59 nnrt.nnrtExists = true;
60 }
61
62 // NNModel
63 LoadFunction(libNeuralNetworks, "OH_NNModel_Construct", &nnrt.OH_NNModel_Construct);
64 LoadFunction(libNeuralNetworks, "OH_NNModel_AddTensor", &nnrt.OH_NNModel_AddTensor);
65 LoadFunction(libNeuralNetworks, "OH_NNModel_SetTensorData", &nnrt.OH_NNModel_SetTensorData);
66 LoadFunction(libNeuralNetworks, "OH_NNModel_AddOperation", &nnrt.OH_NNModel_AddOperation);
67 LoadFunction(libNeuralNetworks, "OH_NNModel_SpecifyInputsAndOutputs", &nnrt.OH_NNModel_SpecifyInputsAndOutputs);
68 LoadFunction(libNeuralNetworks, "OH_NNModel_Finish", &nnrt.OH_NNModel_Finish);
69 LoadFunction(libNeuralNetworks, "OH_NNModel_Destroy", &nnrt.OH_NNModel_Destroy);
70 LoadFunction(libNeuralNetworks, "OH_NNModel_GetAvailableOperations", &nnrt.OH_NNModel_GetAvailableOperations);
71
72 // NNCompilation
73 LoadFunction(libNeuralNetworks, "OH_NNCompilation_Construct", &nnrt.OH_NNCompilation_Construct);
74 LoadFunction(libNeuralNetworks, "OH_NNCompilation_SetDevice", &nnrt.OH_NNCompilation_SetDevice);
75 LoadFunction(libNeuralNetworks, "OH_NNCompilation_SetCache", &nnrt.OH_NNCompilation_SetCache);
76 LoadFunction(libNeuralNetworks, "OH_NNCompilation_SetPerformanceMode", &nnrt.OH_NNCompilation_SetPerformanceMode);
77 LoadFunction(libNeuralNetworks, "OH_NNCompilation_SetPriority", &nnrt.OH_NNCompilation_SetPriority);
78 LoadFunction(libNeuralNetworks, "OH_NNCompilation_EnableFloat16", &nnrt.OH_NNCompilation_EnableFloat16);
79 LoadFunction(libNeuralNetworks, "OH_NNCompilation_Build", &nnrt.OH_NNCompilation_Build);
80 LoadFunction(libNeuralNetworks, "OH_NNCompilation_Destroy", &nnrt.OH_NNCompilation_Destroy);
81
82 // NNExecutor
83 LoadFunction(libNeuralNetworks, "OH_NNExecutor_Construct", &nnrt.OH_NNExecutor_Construct);
84 LoadFunction(libNeuralNetworks, "OH_NNExecutor_SetInput", &nnrt.OH_NNExecutor_SetInput);
85 LoadFunction(libNeuralNetworks, "OH_NNExecutor_SetOutput", &nnrt.OH_NNExecutor_SetOutput);
86 LoadFunction(libNeuralNetworks, "OH_NNExecutor_GetOutputShape", &nnrt.OH_NNExecutor_GetOutputShape);
87 LoadFunction(libNeuralNetworks, "OH_NNExecutor_Run", &nnrt.OH_NNExecutor_Run);
88 LoadFunction(libNeuralNetworks, "OH_NNExecutor_AllocateInputMemory", &nnrt.OH_NNExecutor_AllocateInputMemory);
89 LoadFunction(libNeuralNetworks, "OH_NNExecutor_AllocateOutputMemory", &nnrt.OH_NNExecutor_AllocateOutputMemory);
90 LoadFunction(libNeuralNetworks, "OH_NNExecutor_DestroyInputMemory", &nnrt.OH_NNExecutor_DestroyInputMemory);
91 LoadFunction(libNeuralNetworks, "OH_NNExecutor_DestroyOutputMemory", &nnrt.OH_NNExecutor_DestroyOutputMemory);
92 LoadFunction(libNeuralNetworks, "OH_NNExecutor_SetInputWithMemory", &nnrt.OH_NNExecutor_SetInputWithMemory);
93 LoadFunction(libNeuralNetworks, "OH_NNExecutor_SetOutputWithMemory", &nnrt.OH_NNExecutor_SetOutputWithMemory);
94 LoadFunction(libNeuralNetworks, "OH_NNExecutor_Destroy", &nnrt.OH_NNExecutor_Destroy);
95
96 // NNDevice
97 LoadFunction(libNeuralNetworks, "OH_NNDevice_GetAllDevicesID", &nnrt.OH_NNDevice_GetAllDevicesID);
98 LoadFunction(libNeuralNetworks, "OH_NNDevice_GetName", &nnrt.OH_NNDevice_GetName);
99 LoadFunction(libNeuralNetworks, "OH_NNDevice_GetType", &nnrt.OH_NNDevice_GetType);
100
101 return nnrt;
102 }
103
NnrtImplementation()104 const NnrtApi* NnrtImplementation()
105 {
106 static const NnrtApi nnrt = LoadNnrt();
107 if (!nnrt.nnrtExists) {
108 return nullptr;
109 }
110 return &nnrt;
111 }
112
113 } // namespace tflite
114