/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ENGINE_ADAPTER_H
#define ENGINE_ADAPTER_H

#include <cstddef>
#include <cstdint>

namespace OHOS {
namespace AI {
// Status of the engine
enum EngineStatus {
    IDLE = 1000,
    PREPARING,
    READY,
    RUNNING,
};

class EngineAdapter {
public:
    virtual ~EngineAdapter() = default;

    /* Initializes the algorithm and gets the algorithm execution handle. */
    virtual int32_t Init(const char *modelPath, intptr_t &handle) = 0;

    /* Deinitializes all the algorithms. */
    virtual int32_t Deinit() = 0;

    /* Runs one inference pass of the model associated with the given handle. */
    virtual int32_t Invoke(intptr_t handle) = 0;

    /* Gets the input buffer address and size after the model associated with the handle is initialized. */
    virtual int32_t GetInputAddr(intptr_t handle, uint16_t nodeId,
        uintptr_t &inputBuffer, size_t &inputSize) = 0;

    /* Gets the output buffer address and size after the model associated with the handle is initialized. */
    virtual int32_t GetOutputAddr(intptr_t handle, uint16_t nodeId,
        uintptr_t &outputBuffer, size_t &outputSize) = 0;

    /* Releases the algorithm associated with the given handle. */
    virtual int32_t ReleaseHandle(intptr_t handle) = 0;
};
} // namespace AI
} // namespace OHOS
#endif // ENGINE_ADAPTER_H
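
/*
 * Illustrative usage sketch (kept as a comment so it does not affect this header).
 * It shows one plausible caller flow over the EngineAdapter interface:
 * Init -> GetInputAddr -> Invoke -> GetOutputAddr -> ReleaseHandle -> Deinit.
 * The concrete class "MyEngineAdapter", the model path, node id 0, and treating
 * 0 as the success return code are assumptions for illustration only; they are
 * not defined by this header.
 *
 *   #include <algorithm>
 *   #include <cstring>
 *   #include <memory>
 *   #include "engine_adapter.h"
 *
 *   int32_t RunOnce(const float *input, size_t inputLen, float *output, size_t outputLen)
 *   {
 *       // MyEngineAdapter is a hypothetical concrete implementation of EngineAdapter.
 *       std::unique_ptr<OHOS::AI::EngineAdapter> adapter = std::make_unique<MyEngineAdapter>();
 *       intptr_t handle = 0;
 *       if (adapter->Init("/data/model.bin", handle) != 0) {  // placeholder model path
 *           return -1;
 *       }
 *       // Copy the caller's data into the model's input buffer for node 0.
 *       uintptr_t inAddr = 0;
 *       size_t inSize = 0;
 *       adapter->GetInputAddr(handle, 0, inAddr, inSize);
 *       std::memcpy(reinterpret_cast<void *>(inAddr), input,
 *                   std::min(inSize, inputLen * sizeof(float)));
 *       // Run one inference pass, then read back the output buffer for node 0.
 *       adapter->Invoke(handle);
 *       uintptr_t outAddr = 0;
 *       size_t outSize = 0;
 *       adapter->GetOutputAddr(handle, 0, outAddr, outSize);
 *       std::memcpy(output, reinterpret_cast<const void *>(outAddr),
 *                   std::min(outSize, outputLen * sizeof(float)));
 *       // Release the per-model handle, then tear down the engine.
 *       adapter->ReleaseHandle(handle);
 *       adapter->Deinit();
 *       return 0;
 *   }
 */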