/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "nnrt_client.h"

#include <dlfcn.h>
#include <string>

#include "common/log.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
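// Resolves the symbol `name` from the opened library handle via dlsym() and, on
// success, stores it in the function-pointer member referenced by nnrtFunction.
// On failure the target pointer is left unchanged and an error is logged.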
template<class T>
void LoadFunction(void* handle, const char* name, T* nnrtFunction)
{
    if (name == nullptr) {
        LOGE("LoadFunction error: the function name is nullptr.");
        return;
    }

    void* fn = dlsym(handle, name);
    if (fn == nullptr) {
        LOGE("LoadFunction error: unable to open function %{public}s", name);
        return;
    }

    *nnrtFunction = reinterpret_cast<T>(fn);
}

// Handle of the dynamically loaded NNRt service client library.
static void* libNNRtService = nullptr;

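// Returns the process-wide NNRtServiceApi singleton. On first use it opens
// libdllite_service_client.z.so with dlopen() and resolves the service entry
// points into the member function pointers; m_serviceAvailable records whether
// the library could be opened.
//
// Illustrative caller-side sketch (not taken from this file):
//   NNRtServiceApi& service = NNRtServiceApi::GetInstance();
//   if (!service.IsServiceAvaliable()) {
//       // Fall back to a path that does not rely on the NNRt service.
//   }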
NNRtServiceApi& NNRtServiceApi::GetInstance()
{
    static NNRtServiceApi nnrtService;

    // Open the service client library once; the handle is shared by every caller.
    std::string nnrtLibraryName = "libdllite_service_client.z.so";
    if (libNNRtService == nullptr) {
        libNNRtService = dlopen(nnrtLibraryName.c_str(), RTLD_LAZY | RTLD_NODELETE);
        if (libNNRtService == nullptr) {
            LOGE("LoadNNRtService error: unable to open library %{public}s", nnrtLibraryName.c_str());
            nnrtService.m_serviceAvailable = false;
            return nnrtService;
        }
    }

    LoadFunction(libNNRtService, "CheckModelSizeFromPath", &nnrtService.CheckModelSizeFromPath);
    LoadFunction(libNNRtService, "CheckModelSizeFromBuffer", &nnrtService.CheckModelSizeFromBuffer);
    LoadFunction(libNNRtService, "CheckModelSizeFromModel", &nnrtService.CheckModelSizeFromModel);
    LoadFunction(libNNRtService, "GetNNRtModelIDFromPath", &nnrtService.GetNNRtModelIDFromPath);
    LoadFunction(libNNRtService, "GetNNRtModelIDFromCache", &nnrtService.GetNNRtModelIDFromCache);
    LoadFunction(libNNRtService, "GetNNRtModelIDFromBuffer", &nnrtService.GetNNRtModelIDFromBuffer);
    LoadFunction(libNNRtService, "GetNNRtModelIDFromModel", &nnrtService.GetNNRtModelIDFromModel);
    LoadFunction(libNNRtService, "SetModelID", &nnrtService.SetModelID);
    LoadFunction(libNNRtService, "IsSupportAuthentication", &nnrtService.IsSupportAuthentication);
    LoadFunction(libNNRtService, "IsSupportScheduling", &nnrtService.IsSupportScheduling);
    LoadFunction(libNNRtService, "Authentication", &nnrtService.Authentication);
    LoadFunction(libNNRtService, "Scheduling", &nnrtService.Scheduling);
    LoadFunction(libNNRtService, "UpdateModelLatency", &nnrtService.UpdateModelLatency);
    LoadFunction(libNNRtService, "Unload", &nnrtService.Unload);

    nnrtService.m_serviceAvailable = true;
    return nnrtService;
}

// Reports whether the service client library was loaded successfully.
bool NNRtServiceApi::IsServiceAvaliable() const
{
    return m_serviceAvailable;
}

// Closes the service client library handle when the singleton is destroyed.
NNRtServiceApi::~NNRtServiceApi()
{
    if (libNNRtService != nullptr) {
        dlclose(libNNRtService);
        libNNRtService = nullptr;
    }
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS