
Searched refs:isInnerMem (Results 1 – 2 of 2) sorted by relevance

/ohos5.0/foundation/ai/neural_network_runtime/frameworks/native/neural_network_runtime/
nnexecutor.cpp
502 std::shared_ptr<NNTensor> inputTensor, const void* inputBuffer, size_t length, bool isInnerMem) in SetInputTensorWithNewBuffer() argument
506 if (m_inputTensors[index].isInnerMem) { in SetInputTensorWithNewBuffer()
518 ExeTensor exeTensor{inputTensor, nullptr, 0, isInnerMem}; in SetInputTensorWithNewBuffer()
602 … if ((m_inputTensors.find(index) != m_inputTensors.end()) && (m_inputTensors[index].isInnerMem)) { in SetInput()
752 if (m_outputTensors[index].isInnerMem) { in SetOutput()
787 m_outputTensors[index].isInnerMem = true; in SetOutput()
816 if (m_outputTensors[index].isInnerMem) { in SetOutputFromMemory()
836 m_outputTensors[index].isInnerMem = false; in SetOutputFromMemory()
1054 if (m_outputTensors[i].isInnerMem) { in Run()
1083 if ((it.second).isInnerMem) { in ~NNExecutor()
[all …]
nnexecutor.h
92 const void* inputBuffer, size_t length, bool isInnerMem);
107 bool isInnerMem {false};
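
Taken together, the hits trace a buffer-ownership flag: SetOutput() marks an executor-allocated output buffer with isInnerMem = true (line 787), SetOutputFromMemory() records caller-supplied memory with isInnerMem = false (line 836), and Run() and ~NNExecutor() release only the buffers whose flag is set (lines 1054 and 1083). The sketch below illustrates that pattern in isolation; it is not the OHOS implementation, and the Memory struct, the ExeTensor layout beyond isInnerMem, and the malloc/free helpers are assumptions made for the example.

    // Minimal sketch of the ownership pattern implied by the hits above.
    // ExeTensor's extra fields, the Memory struct and the allocation helpers
    // are illustrative assumptions, not the real NNExecutor code.
    #include <cstddef>
    #include <cstdlib>
    #include <unordered_map>

    struct Memory {
        void* data {nullptr};
        size_t length {0};
    };

    struct ExeTensor {
        Memory memory;
        bool isInnerMem {false};   // true only if the executor allocated memory.data itself
    };

    class ExecutorSketch {
    public:
        // Executor-allocated output buffer: mark it as inner memory so it is freed later.
        void SetOutput(size_t index, size_t length)
        {
            ReleaseIfInner(index);                        // drop a previous inner buffer first
            ExeTensor tensor;
            tensor.memory = Memory{std::malloc(length), length};
            tensor.isInnerMem = true;
            m_outputTensors[index] = tensor;
        }

        // Caller-provided memory: record it but do not take ownership.
        void SetOutputFromMemory(size_t index, const Memory& memory)
        {
            ReleaseIfInner(index);
            m_outputTensors[index] = ExeTensor{memory, /*isInnerMem=*/false};
        }

        ~ExecutorSketch()
        {
            for (auto& entry : m_outputTensors) {
                if (entry.second.isInnerMem) {            // free only what the executor allocated
                    std::free(entry.second.memory.data);
                }
            }
        }

    private:
        void ReleaseIfInner(size_t index)
        {
            auto it = m_outputTensors.find(index);
            if (it != m_outputTensors.end() && it->second.isInnerMem) {
                std::free(it->second.memory.data);
            }
        }

        std::unordered_map<size_t, ExeTensor> m_outputTensors;
    };

Keeping the flag next to the buffer means the release paths never have to guess who allocated the memory, which matches the isInnerMem checks visible at lines 752, 816, 1054 and 1083 above.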