/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "nncompiled_cache.h"

#include <unistd.h>
#include <climits>
#include <cstdlib>
#include <fstream>
#include <functional>
#include <memory>
#include <limits>

#include "common/utils.h"
#include "backend_manager.h"
#include "nnbackend.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
constexpr int32_t MAX_MODEL_SIZE = 500 * 1024 * 1024; // 500MB
constexpr int32_t NULL_PTR_LENGTH = 0;
constexpr int32_t NUMBER_CACHE_INFO_MEMBERS = 3;
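// Layout of the "<modelName>cache_info.nncache" file: fileNumber, version and deviceId
// (the NUMBER_CACHE_INFO_MEMBERS header members), followed by one checksum per cache model
// file and the operator version.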
constexpr int32_t HEX_UNIT = 16;
constexpr char ROOT_DIR_STR = '/';
constexpr char DOUBLE_SLASH_STR[] = "//";
constexpr int OPVERSION_SUBSTR_NUM = 2;
const std::string CURRENT_VERSION = "0x00000000";
const std::string HIAI_VERSION_PATH = "/data/data/hiai/version";

OH_NN_ReturnCode NNCompiledCache::Save(const std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches,
                                       const std::string& cacheDir,
                                       uint32_t version)
{
    if (caches.empty()) {
        LOGE("[NNCompiledCache] Save failed, caches is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (m_device == nullptr) {
        LOGE("[NNCompiledCache] Save failed, m_device is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    OH_NN_ReturnCode ret = GenerateCacheFiles(caches, cacheDir, version);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] Save failed, error happened when calling GenerateCacheFiles.");
        return ret;
    }

    LOGI("[NNCompiledCache] Save success. %zu caches are saved.", caches.size());
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNCompiledCache::Restore(const std::string& cacheDir,
                                          uint32_t version,
                                          std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches)
{
    if (cacheDir.empty()) {
        LOGE("[NNCompiledCache] Restore failed, cacheDir is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!caches.empty()) {
        LOGE("[NNCompiledCache] Restore failed, caches is not empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (m_device == nullptr) {
        LOGE("[NNCompiledCache] Restore failed, m_device is empty.");
        return OH_NN_INVALID_PARAMETER;
    }

    std::string cacheInfoPath = cacheDir + "/" + m_modelName + "cache_info.nncache";
    char path[PATH_MAX];
    if (realpath(cacheInfoPath.c_str(), path) == nullptr) {
        LOGE("[NNCompiledCache] Restore failed, fail to get the real path of cacheInfoPath.");
        return OH_NN_INVALID_PARAMETER;
    }
    if (access(cacheInfoPath.c_str(), F_OK) != 0) {
        LOGE("[NNCompiledCache] Restore failed, cacheInfoPath does not exist.");
        return OH_NN_INVALID_PARAMETER;
    }

    NNCompiledCacheInfo cacheInfo;
    OH_NN_ReturnCode ret = CheckCacheInfo(cacheInfo, cacheInfoPath);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] Restore failed, error happened when calling CheckCacheInfo.");
        return ret;
    }

    if (static_cast<int64_t>(version) > cacheInfo.version) {
        LOGE("[NNCompiledCache] Restore failed, version does not match.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (static_cast<int64_t>(version) < cacheInfo.version) {
        LOGE("[NNCompiledCache] Restore failed, the current version is lower than the cache files, "
             "please set a higher version.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

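    // Read every cache model file and verify its checksum against the value recorded in cache_info.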
    for (uint32_t i = 0; i < cacheInfo.fileNumber; ++i) {
        std::string cacheModelPath = cacheDir + "/" + m_modelName + std::to_string(i) + ".nncache";
        if (access(cacheModelPath.c_str(), F_OK) != 0) {
            LOGE("[NNCompiledCache] Restore failed, %{public}s does not exist.", cacheModelPath.c_str());
            return OH_NN_INVALID_PARAMETER;
        }

        OHOS::NeuralNetworkRuntime::Buffer modelBuffer;
        ret = ReadCacheModelFile(cacheModelPath, modelBuffer);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNCompiledCache] Restore failed, error happened when calling ReadCacheModelFile.");
            return ret;
        }

        if (GetCrc16(static_cast<char*>(modelBuffer.data), modelBuffer.length) !=
            cacheInfo.modelCheckSum[i]) {
            LOGE("[NNCompiledCache] Restore failed, the cache model file %{public}s has been changed.",
                 cacheModelPath.c_str());
            return OH_NN_INVALID_FILE;
        }

        caches.emplace_back(std::move(modelBuffer));
    }

    return ret;
}

OH_NN_ReturnCode NNCompiledCache::SetBackend(size_t backendID)
{
    BackendManager& backendManager = BackendManager::GetInstance();
    std::shared_ptr<Backend> backend = backendManager.GetBackend(backendID);
    if (backend == nullptr) {
        LOGE("[NNCompiledCache] SetBackend failed, backend with backendID %{public}zu does not exist.", backendID);
        return OH_NN_INVALID_PARAMETER;
    }

    std::shared_ptr<NNBackend> nnBackend = std::reinterpret_pointer_cast<NNBackend>(backend);
    m_device = nnBackend->GetDevice();
    if (m_device == nullptr) {
        LOGE("[NNCompiledCache] SetBackend failed, device with backendID %{public}zu does not exist.", backendID);
        return OH_NN_FAILED;
    }

    m_backendID = backendID;
    return OH_NN_SUCCESS;
}

void NNCompiledCache::SetModelName(const std::string& modelName)
{
    m_modelName = modelName;
}

OH_NN_ReturnCode NNCompiledCache::GenerateCacheFiles(const std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches,
                                                     const std::string& cacheDir,
                                                     uint32_t version) const
{
    const size_t cacheNumber = caches.size();
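    // cacheInfo holds the three header members, one checksum per cache file and the opVersion slot.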
    uint32_t cacheSize = NUMBER_CACHE_INFO_MEMBERS + cacheNumber + 1;
    std::unique_ptr<int64_t[]> cacheInfo = CreateUniquePtr<int64_t[]>(cacheSize);
    if (cacheInfo == nullptr) {
        LOGE("[NNCompiledCache] GenerateCacheFiles failed, fail to create cacheInfo instance.");
        return OH_NN_MEMORY_ERROR;
    }

    OH_NN_ReturnCode ret = GenerateCacheModel(caches, cacheInfo, cacheDir, version);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] GenerateCacheFiles failed, error happened when calling GenerateCacheModel.");
        return ret;
    }

    uint32_t infoCharNumber = cacheSize * sizeof(uint64_t);
    ret = WriteCacheInfo(infoCharNumber, cacheInfo, cacheDir);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] GenerateCacheFiles failed, error happened when calling WriteCacheInfo.");
        return ret;
    }

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNCompiledCache::GenerateCacheModel(const std::vector<OHOS::NeuralNetworkRuntime::Buffer>& caches,
                                                     std::unique_ptr<int64_t[]>& cacheInfo,
                                                     const std::string& cacheDir,
                                                     uint32_t version) const
{
    size_t cacheNumber = caches.size();
    if (cacheNumber == 0 || cacheNumber > NN_CACHE_FILE_NUMBER_MAX) {
        LOGE("[NNCompiledCache] Caches size is equal to 0 or greater than NN_CACHE_FILE_NUMBER_MAX.");
        return OH_NN_FAILED;
    }

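    // Fill the cache info header: number of cache files, compiled version and backend (device) ID.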
    auto cacheInfoPtr = cacheInfo.get();
    *cacheInfoPtr++ = static_cast<int64_t>(cacheNumber);
    *cacheInfoPtr++ = static_cast<int64_t>(version);
    *cacheInfoPtr++ = static_cast<int64_t>(m_backendID); // Should call SetBackend first.

    // standardize the input dir
    OH_NN_ReturnCode ret = OH_NN_SUCCESS;
    char path[PATH_MAX];
    if (realpath(cacheDir.c_str(), path) == nullptr) {
        LOGE("[NNCompiledCache] GenerateCacheModel failed, fail to get the real path of cacheDir.");
        return OH_NN_INVALID_PARAMETER;
    }

    // verify that the standardized path is valid
    ret = VerifyCachePath(path);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] GenerateCacheModel failed, fail to verify the file path of cacheDir.");
        return ret;
    }

    std::string cachePath = path;
    for (size_t i = 0; i < cacheNumber; ++i) {
        std::string cacheModelFile = cachePath + "/" + m_modelName + std::to_string(i) + ".nncache";
        std::ofstream cacheModelStream(cacheModelFile, std::ios::binary | std::ios::out | std::ios::trunc);
        if (cacheModelStream.fail()) {
            LOGE("[NNCompiledCache] GenerateCacheModel failed, model cache file is invalid.");
            return OH_NN_INVALID_PARAMETER;
        }

        uint64_t checkSum =
            static_cast<uint64_t>(GetCrc16(static_cast<char*>(caches[i].data), caches[i].length));
        *cacheInfoPtr++ = checkSum;
        if (!cacheModelStream.write(static_cast<const char*>(caches[i].data), caches[i].length)) {
            LOGE("[NNCompiledCache] GenerateCacheModel failed, fail to write cache model.");
            cacheModelStream.close();
            return OH_NN_SAVE_CACHE_EXCEPTION;
        }

        cacheModelStream.close();
    }

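    // Read the current HiAI version from HIAI_VERSION_PATH; fall back to CURRENT_VERSION if it cannot be read.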
    std::string currentVersion = CURRENT_VERSION;
    char versionPath[PATH_MAX];
    if (realpath(HIAI_VERSION_PATH.c_str(), versionPath) != nullptr) {
        std::ifstream inf(versionPath);
        if (inf.is_open()) {
            getline(inf, currentVersion);
        }
        inf.close();
    }

    int currentOpVersion = std::stoi(currentVersion.substr(OPVERSION_SUBSTR_NUM)); // skip the leading "0x"
    *cacheInfoPtr++ = currentOpVersion;

    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNCompiledCache::WriteCacheInfo(uint32_t cacheSize,
                                                 std::unique_ptr<int64_t[]>& cacheInfo,
                                                 const std::string& cacheDir) const
{
    // standardize the input dir
    char path[PATH_MAX];
    if (realpath(cacheDir.c_str(), path) == nullptr) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, fail to get the real path of cacheDir.");
        return OH_NN_INVALID_PARAMETER;
    }

    // verify that the standardized path is valid
    OH_NN_ReturnCode ret = VerifyCachePath(path);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, fail to verify the file path of cacheDir.");
        return ret;
    }

    std::string cachePath = path;
    std::string cacheInfoPath = cachePath + "/" + m_modelName + "cache_info.nncache";
    std::ofstream cacheInfoStream(cacheInfoPath, std::ios::binary | std::ios::out | std::ios::trunc);
    if (cacheInfoStream.fail()) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, model cache info file is invalid.");
        return OH_NN_INVALID_FILE;
    }

    if (!cacheInfoStream.write(reinterpret_cast<const char*>(cacheInfo.get()), cacheSize)) {
        LOGE("[NNCompiledCache] WriteCacheInfo failed, fail to write cache info.");
        cacheInfoStream.close();
        return OH_NN_SAVE_CACHE_EXCEPTION;
    }

    cacheInfoStream.close();
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNCompiledCache::CheckCacheInfo(NNCompiledCacheInfo& modelCacheInfo,
                                                 const std::string& cacheInfoPath) const
{
    // cacheInfoPath is validated outside.
    std::ifstream infoCacheFile(cacheInfoPath.c_str(), std::ios::in | std::ios::binary);
    if (!infoCacheFile) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed, error happened when opening cache info file.");
        return OH_NN_INVALID_FILE;
    }

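    // Read the three int64_t header members (file number, version and device ID) directly into the struct.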
    int charNumber = NUMBER_CACHE_INFO_MEMBERS * sizeof(uint64_t);
    if (!infoCacheFile.read(reinterpret_cast<char*>(&(modelCacheInfo)), charNumber)) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed, error happened when reading cache info file.");
        infoCacheFile.close();
        return OH_NN_INVALID_FILE;
    }

    // modelCacheInfo.deviceId is an int64_t converted from a size_t value,
    // so converting it back to size_t here will not truncate the value.
    size_t deviceId = static_cast<size_t>(modelCacheInfo.deviceId);
    if (deviceId != m_backendID) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed. The deviceId in the cache files "
             "is different from the current deviceId, "
             "please change the cache directory or the current deviceId.");
        infoCacheFile.close();
        return OH_NN_INVALID_PARAMETER;
    }

    std::vector<int64_t> modelCheckSum;
    modelCheckSum.resize(modelCacheInfo.fileNumber);
    modelCacheInfo.modelCheckSum.resize(modelCacheInfo.fileNumber);
    if (!infoCacheFile.read(reinterpret_cast<char*>(&modelCheckSum[0]),
        modelCacheInfo.fileNumber * sizeof(uint64_t))) {
        LOGE("[NNCompiledCache] CheckCacheInfo failed. The info cache file has been changed.");
        infoCacheFile.close();
        return OH_NN_INVALID_FILE;
    }

    for (uint32_t i = 0; i < modelCacheInfo.fileNumber; ++i) {
        modelCacheInfo.modelCheckSum[i] = static_cast<unsigned short>(modelCheckSum[i]);
    }

    if (!infoCacheFile.read(reinterpret_cast<char*>(&(modelCacheInfo.opVersion)), sizeof(uint64_t))) {
        LOGW("[NNCompiledCache] CheckCacheInfo failed to read opVersion from the cache info file.");
    }

    infoCacheFile.close();
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNCompiledCache::ReadCacheModelFile(const std::string& filePath,
                                                     OHOS::NeuralNetworkRuntime::Buffer& cache) const
{
    // filePath is validated in NNCompiledCache::Restore, no need to check it again.
    std::ifstream ifs(filePath.c_str(), std::ios::in | std::ios::binary);
    if (!ifs) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, file is invalid.");
        return OH_NN_INVALID_FILE;
    }

    int fsize{-1};
    OH_NN_ReturnCode ret = GetCacheFileLength(ifs, fsize);
    if (ret != OH_NN_SUCCESS) {
        ifs.close();
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, get file %{public}s length failed.", filePath.c_str());
        return ret;
    }

    ifs.seekg(0, std::ios::beg);
    if (!ifs.good()) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, file is invalid.");
        ifs.close();
        return OH_NN_INVALID_FILE;
    }

    char* ptr = static_cast<char*>(m_device->AllocateBuffer(fsize));
    if (ptr == nullptr) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, failed to allocate memory.");
        ifs.close();
        return OH_NN_MEMORY_ERROR;
    }

    ifs.read(ptr, fsize);
    if (!ifs.good()) {
        LOGE("[NNCompiledCache] ReadCacheModelFile failed, failed to read file.");
        ifs.close();
        m_device->ReleaseBuffer(ptr);
        ptr = nullptr;
        return OH_NN_INVALID_FILE;
    }

    ifs.close();
    cache.data = ptr;
    cache.length = static_cast<size_t>(fsize); // fsize should be non-negative, safe to cast.
    return OH_NN_SUCCESS;
}

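// Compute a 16-bit checksum: accumulate the buffer as 16-bit words, fold the carries back into
// the low 16 bits, and return the one's complement of the sum.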
unsigned short NNCompiledCache::GetCrc16(char* buffer, size_t length) const
{
    unsigned int sum = 0;
    while (length > 1) {
        sum += *(reinterpret_cast<unsigned short*>(buffer));
        length -= sizeof(unsigned short);
        buffer += sizeof(unsigned short);
    }

    if (length > 0) {
        sum += *(reinterpret_cast<unsigned char*>(buffer));
    }

    while (sum >> HEX_UNIT) {
        sum = (sum >> HEX_UNIT) + (sum & 0xffff);
    }

    return static_cast<unsigned short>(~sum);
}

OH_NN_ReturnCode NNCompiledCache::GetCacheFileLength(std::ifstream& ifs, int& fileSize) const
{
    ifs.seekg(0, std::ios::end);
    if (!ifs.good()) {
        LOGE("[NNCompiledCache] GetCacheFileLength failed, fail to set the position of the next character "
             "to be extracted from the input stream.");
        return OH_NN_FAILED;
    }

    int handleValue = ifs.tellg();
    if (handleValue == -1) {
        LOGE("[NNCompiledCache] GetCacheFileLength failed, fail to get position of the input stream.");
        return OH_NN_INVALID_FILE;
    }

    if ((handleValue > MAX_MODEL_SIZE) || (handleValue == NULL_PTR_LENGTH)) {
        LOGE("[NNCompiledCache] GetCacheFileLength failed, unable to read huge or empty input stream, "
             "get cache file size=%{public}d",
             handleValue);
        return OH_NN_INVALID_FILE;
    }

    fileSize = handleValue;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode NNCompiledCache::VerifyCachePath(const std::string& cachePath) const
{
    // exception: input path does not start with '/'.
    if (cachePath.find(ROOT_DIR_STR) != size_t(0)) {
        LOGE("[NNCompiledCache] VerifyCachePath failed, input file dir=%{public}s is invalid, "
             "should start with '/'.",
             cachePath.c_str());
        return OH_NN_INVALID_FILE;
    }

    // exception: input path contains a consecutive double '/'.
    if (cachePath.find(DOUBLE_SLASH_STR) != std::string::npos) {
        LOGE("[NNCompiledCache] VerifyCachePath failed, input file dir=%{public}s is invalid, "
             "containing double '/'.",
             cachePath.c_str());
        return OH_NN_INVALID_FILE;
    }

    return OH_NN_SUCCESS;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS