/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef NEURAL_NETWORK_RUNTIME_HDI_DEVICE_V2_0_H
#define NEURAL_NETWORK_RUNTIME_HDI_DEVICE_V2_0_H

#include <v2_0/nnrt_types.h>
#include <v2_0/innrt_device.h>
#include <v2_0/iprepared_model.h>
#include "refbase.h"

#include "device.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace V2_0 = OHOS::HDI::Nnrt::V2_0;
class HDIDeviceV2_0 : public Device {
public:
    explicit HDIDeviceV2_0(OHOS::sptr<V2_0::INnrtDevice> device);

    OH_NN_ReturnCode GetDeviceName(std::string& name) override;
    OH_NN_ReturnCode GetVendorName(std::string& name) override;
    OH_NN_ReturnCode GetVersion(std::string& version) override;
    OH_NN_ReturnCode GetDeviceType(OH_NN_DeviceType& deviceType) override;
    OH_NN_ReturnCode GetDeviceStatus(DeviceStatus& status) override;
    OH_NN_ReturnCode GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                           std::vector<bool>& ops) override;

    OH_NN_ReturnCode IsFloat16PrecisionSupported(bool& isSupported) override;
    OH_NN_ReturnCode IsPerformanceModeSupported(bool& isSupported) override;
    OH_NN_ReturnCode IsPrioritySupported(bool& isSupported) override;
    OH_NN_ReturnCode IsDynamicInputSupported(bool& isSupported) override;
    OH_NN_ReturnCode IsModelCacheSupported(bool& isSupported) override;

    OH_NN_ReturnCode PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                  const ModelConfig& config,
                                  std::shared_ptr<PreparedModel>& preparedModel) override;
    OH_NN_ReturnCode PrepareModel(const void* metaGraph,
                                  const ModelConfig& config,
                                  std::shared_ptr<PreparedModel>& preparedModel) override;
    OH_NN_ReturnCode PrepareModelFromModelCache(const std::vector<Buffer>& modelCache,
                                                const ModelConfig& config,
                                                std::shared_ptr<PreparedModel>& preparedModel,
                                                bool& isUpdatable) override;
    OH_NN_ReturnCode PrepareOfflineModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                         const ModelConfig& config,
                                         std::shared_ptr<PreparedModel>& preparedModel) override;

    void* AllocateBuffer(size_t length) override;
    void* AllocateTensorBuffer(size_t length, std::shared_ptr<TensorDesc> tensor) override;
    void* AllocateTensorBuffer(size_t length, std::shared_ptr<NNTensor> tensor) override;
    OH_NN_ReturnCode ReleaseBuffer(const void* buffer) override;

    OH_NN_ReturnCode AllocateBuffer(size_t length, int& fd) override;
    OH_NN_ReturnCode ReleaseBuffer(int fd, size_t length) override;

private:
    OH_NN_ReturnCode ReleaseSharedBuffer(const V2_0::SharedBuffer& buffer);
    OH_NN_ReturnCode GetOfflineModelFromLiteGraph(std::shared_ptr<const mindspore::lite::LiteGraph> graph,
                                                  std::vector<std::vector<uint8_t>>& offlineModels);
    OH_NN_ReturnCode AllocateDeviceBufferForOfflineModel(const std::vector<std::vector<uint8_t>>& offlineModels,
                                                         std::vector<Buffer>& deviceBuffers);
    OH_NN_ReturnCode CopyOfflineModelToDevice(const std::vector<std::vector<uint8_t>>& offlineModels,
                                              std::vector<Buffer>& deviceBuffers);
    OH_NN_ReturnCode PrepareOfflineModel(std::vector<Buffer>& deviceBuffers,
                                         const ModelConfig& config,
                                         const std::map<std::string, std::vector<int8_t>>& extensions,
                                         std::shared_ptr<PreparedModel>& preparedModel);

private:
    // first: major version, second: minor version
    std::pair<uint32_t, uint32_t> m_hdiVersion;
    OHOS::sptr<V2_0::INnrtDevice> m_iDevice {nullptr};
};
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // NEURAL_NETWORK_RUNTIME_HDI_DEVICE_V2_0_H
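
/*
 * Illustrative usage sketch (not part of the original header): how a caller
 * might obtain the V2.0 HDI NNRt service and wrap it in HDIDeviceV2_0 before
 * querying device information. OHOS::HDI::Nnrt::V2_0::INnrtDevice::Get() is
 * assumed here as the standard accessor generated for HDI proxy interfaces,
 * and error handling is kept minimal.
 *
 *   OHOS::sptr<OHOS::HDI::Nnrt::V2_0::INnrtDevice> iDevice =
 *       OHOS::HDI::Nnrt::V2_0::INnrtDevice::Get();
 *   if (iDevice != nullptr) {
 *       auto device = std::make_shared<OHOS::NeuralNetworkRuntime::HDIDeviceV2_0>(iDevice);
 *       std::string name;
 *       if (device->GetDeviceName(name) == OH_NN_SUCCESS) {
 *           // name now holds the device name reported by the HDI driver
 *       }
 *   }
 */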