Searched refs:nnModelCache (Results 1 – 2 of 2) sorted by relevance
/hardware/interfaces/neuralnetworks/utils/adapter/hidl/src/Device.cpp
    180  auto nnModelCache = NN_TRY(convertInput(modelCache));  in prepareModel_1_2() local
    185  nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),  in prepareModel_1_2()
    188  nnModelCache, nnDataCache, nnToken, {}, {});  in prepareModel_1_2()
    210  auto nnModelCache = NN_TRY(convertInput(modelCache));  in prepareModel_1_3() local
    215  nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),  in prepareModel_1_3()
    218  nnModelCache, nnDataCache, nnToken, {}, {});  in prepareModel_1_3()
    236  auto nnModelCache = NN_TRY(convertInput(modelCache));  in prepareModelFromCache() local
    240  Task task = [device, nnModelCache = std::move(nnModelCache),  in prepareModelFromCache()
    242  auto result = device->prepareModelFromCache({}, nnModelCache, nnDataCache, nnToken);  in prepareModelFromCache()
    260  auto nnModelCache = NN_TRY(convertInput(modelCache));  in prepareModelFromCache_1_3() local
    [all …]
/hardware/interfaces/neuralnetworks/utils/adapter/aidl/src/Device.cpp
    177  auto nnModelCache = NN_TRY(convertInput(modelCache));  in prepareModel() local
    184  nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),  in prepareModel()
    188  device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline, nnModelCache,  in prepareModel()
    207  auto nnModelCache = NN_TRY(convertInput(modelCache));  in prepareModelFromCache() local
    211  auto task = [device, nnDeadline, nnModelCache = std::move(nnModelCache),  in prepareModelFromCache()
    213  auto result = device->prepareModelFromCache(nnDeadline, nnModelCache, nnDataCache, nnToken);  in prepareModelFromCache()
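All of the hits follow the same shape: the incoming modelCache argument is converted into the driver's canonical form with NN_TRY(convertInput(...)), the converted handles are moved into a deferred task lambda, and the task forwards them to device->prepareModelFromCache(...) together with the data cache and token. The sketch below illustrates that capture-and-forward pattern only; CacheHandle, Token, Device, Task, convertInput and makePrepareFromCacheTask are simplified placeholder types and helpers, not the actual NNAPI adapter or nn::IDevice API.

    // Minimal sketch of the capture-and-forward pattern seen in the hits above.
    // All types below are simplified stand-ins, not the real NNAPI adapter types.
    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <utility>
    #include <vector>

    using CacheHandle = std::vector<int>;    // stand-in for a vector of cache file handles
    using Token = std::vector<uint8_t>;      // stand-in for the cache token
    using Task = std::function<void()>;      // deferred work item, as in the adapter

    struct Device {
        // Stand-in for the driver's prepareModelFromCache() entry point.
        bool prepareModelFromCache(const CacheHandle& modelCache, const CacheHandle& dataCache,
                                   const Token& token) const {
            std::cout << "preparing from " << modelCache.size() << " model cache handle(s), "
                      << dataCache.size() << " data cache handle(s), token size "
                      << token.size() << "\n";
            return true;
        }
    };

    // Stand-in for convertInput(): validates/converts the transport-layer handles
    // into the canonical representation used by the driver.
    CacheHandle convertInput(const CacheHandle& handles) { return handles; }

    Task makePrepareFromCacheTask(const Device* device, CacheHandle modelCache,
                                  CacheHandle dataCache, Token token) {
        // Convert the inputs up front (NN_TRY(convertInput(...)) in the real code),
        // then move the converted handles into the task so nothing is copied again.
        auto nnModelCache = convertInput(modelCache);
        auto nnDataCache = convertInput(dataCache);
        return [device, nnModelCache = std::move(nnModelCache),
                nnDataCache = std::move(nnDataCache), token = std::move(token)] {
            device->prepareModelFromCache(nnModelCache, nnDataCache, token);
        };
    }

    int main() {
        Device device;
        Task task = makePrepareFromCacheTask(&device, CacheHandle{1, 2}, CacheHandle{3}, Token(32));
        task();  // in the adapter this would be posted to an executor instead of run inline
    }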