/packages/modules/NeuralNetworks/runtime/test/ |
D | TestPartitioning.cpp |
    166   using IOType = ::android::nn::IOType;  typedef
    3369  using TestStepRole = std::tuple<std::string, IOType>;
    3384  ss << "{" << deviceName << ", " << (type == IOType::INPUT ? "INPUT" : "OUTPUT") << "} ";  in toString()
    3402  index, [&actual](const auto* preparedModel, IOType type, uint32_t) {  in checkStepRolesOfInput()
    3413  index, [&actual](const auto* preparedModel, IOType type, uint32_t) {  in checkStepRolesOfOutput()
    3425  index, [&actual](const auto* preparedModel, IOType type, uint32_t) {  in checkStepRolesOfSourceOperand()
    3459  checkStepRolesOfInput(0, {{"deviceA", IOType::INPUT}});  in TEST_F()
    3460  checkStepRolesOfInput(2, {{"deviceB", IOType::INPUT}});  in TEST_F()
    3462  checkStepRolesOfInput(1, {{"deviceA", IOType::INPUT}, {"deviceB", IOType::INPUT}});  in TEST_F()
    3464  checkStepRolesOfOutput(0, {{"deviceA", IOType::OUTPUT}, {"deviceC", IOType::INPUT}});  in TEST_F()
    [all …]
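The partitioning tests model an expected step role as a (device name, IOType) pair and print sets of them in assertion messages. A minimal standalone sketch of that representation, assuming only the names visible in the matches above (the real test fixture and checkStepRoles* helpers are omitted):

    // Standalone sketch of the {deviceName, IOType} role representation used by
    // the partitioning tests; illustrative only, not the actual test code.
    #include <iostream>
    #include <set>
    #include <sstream>
    #include <string>
    #include <tuple>

    enum class IOType { INPUT, OUTPUT };  // mirrors nnapi/Validation.h

    using TestStepRole = std::tuple<std::string, IOType>;

    // Renders a set of roles as "{deviceA, INPUT} {deviceB, OUTPUT} ...", the
    // format the test's toString() helper produces for failure messages.
    std::string toString(const std::set<TestStepRole>& roles) {
        std::ostringstream ss;
        for (const auto& [deviceName, type] : roles) {
            ss << "{" << deviceName << ", "
               << (type == IOType::INPUT ? "INPUT" : "OUTPUT") << "} ";
        }
        return ss.str();
    }

    int main() {
        std::set<TestStepRole> roles = {{"deviceA", IOType::INPUT}, {"deviceB", IOType::OUTPUT}};
        std::cout << toString(roles) << "\n";  // {deviceA, INPUT} {deviceB, OUTPUT}
    }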
|
/packages/modules/NeuralNetworks/runtime/ |
D | Memory.cpp |
    51   bool validate(const CompilationBuilder*, IOType, uint32_t, const ANeuralNetworksOperandType*,  in validate() argument
    74   bool validate(const CompilationBuilder* compilation, IOType, uint32_t,  in validate() argument
    101  bool validate(const CompilationBuilder* compilation, IOType ioType, uint32_t index,  in validate()
    309  int MemoryBuilder::addRole(const CompilationBuilder& compilation, IOType ioType, uint32_t index,  in addRole()
    311  const char* tag = ioType == IOType::INPUT ? "addInputRole" : "addOutputRole";  in addRole()
    321  std::vector<std::tuple<const RuntimePreparedModel*, IOType, uint32_t>> roles;  in addRole()
    322  auto callback = [&roles](const auto* preparedModel, IOType type, uint32_t index) {  in addRole()
    325  if (ioType == IOType::INPUT) {  in addRole()
    338  if (ioType == IOType::INPUT) {  in addRole()
    379  if (type == IOType::INPUT) {  in addRole()
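MemoryBuilder::addRole collects (prepared model, IOType, index) tuples through a callback and later hands them to the device when allocating a driver-managed buffer. A hedged sketch of that collection pattern, with RuntimePreparedModel and the plan's forEachStepRole* walk stubbed out as stand-ins:

    // Sketch of the role-collection pattern from MemoryBuilder::addRole: a lambda
    // appends every (preparedModel, IOType, index) role the execution plan reports
    // for a given compilation input or output. Types are simplified stand-ins.
    #include <cstdint>
    #include <functional>
    #include <tuple>
    #include <vector>

    enum class IOType { INPUT, OUTPUT };
    struct RuntimePreparedModel {};  // stand-in for the runtime wrapper class

    using StepRoleCallback = std::function<void(const RuntimePreparedModel*, IOType, uint32_t)>;

    // Stand-in for ExecutionPlan::forEachStepRoleOfInput: report one role.
    void forEachStepRoleOfInput(const RuntimePreparedModel* pm, uint32_t index,
                                const StepRoleCallback& callback) {
        callback(pm, IOType::INPUT, index);
    }

    std::vector<std::tuple<const RuntimePreparedModel*, IOType, uint32_t>> collectRoles(
            const RuntimePreparedModel* pm, IOType ioType, uint32_t index) {
        std::vector<std::tuple<const RuntimePreparedModel*, IOType, uint32_t>> roles;
        auto callback = [&roles](const auto* preparedModel, IOType type, uint32_t ioIndex) {
            roles.emplace_back(preparedModel, type, ioIndex);
        };
        if (ioType == IOType::INPUT) {
            forEachStepRoleOfInput(pm, index, callback);
        }  // the real code has a matching OUTPUT branch
        return roles;
    }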
|
D | CompilationBuilder.cpp |
    264  *alignment = mPlan.getMemoryPreference(IOType::INPUT, index).alignment;  in getPreferredMemoryAlignmentForInput()
    286  *padding = mPlan.getMemoryPreference(IOType::INPUT, index).padding;  in getPreferredMemoryPaddingForInput()
    309  *alignment = mPlan.getMemoryPreference(IOType::OUTPUT, index).alignment;  in getPreferredMemoryAlignmentForOutput()
    332  *padding = mPlan.getMemoryPreference(IOType::OUTPUT, index).padding;  in getPreferredMemoryPaddingForOutput()
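These methods back the public ANeuralNetworksCompilation_getPreferredMemoryAlignment*/Padding* queries, which resolve to mPlan.getMemoryPreference with the matching IOType. A minimal usage sketch on the NDK API, assuming a compilation that has already been finished successfully; error handling is trimmed:

    // Querying the driver-preferred alignment and padding for input 0 of a
    // finished compilation. Sketch only: setup and error handling omitted.
    #include <android/NeuralNetworks.h>
    #include <cstdint>
    #include <cstdio>

    void queryInput0Preferences(const ANeuralNetworksCompilation* compilation) {
        uint32_t alignment = 0, padding = 0;
        if (ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(
                    compilation, /*index=*/0, &alignment) == ANEURALNETWORKS_NO_ERROR &&
            ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(
                    compilation, /*index=*/0, &padding) == ANEURALNETWORKS_NO_ERROR) {
            // The runtime resolves these via getMemoryPreference(IOType::INPUT, 0).
            std::printf("input 0: alignment=%u padding=%u\n", alignment, padding);
        }
    }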
|
D | Memory.h |
    97   using CompilationRole = std::tuple<const CompilationBuilder*, IOType, uint32_t>;
    129  virtual bool validate(const CompilationBuilder* compilation, IOType ioType, uint32_t index,
    226  int addRole(const CompilationBuilder& compilation, IOType ioType, uint32_t index, float freq);
|
D | ExecutionPlan.h |
    563  using StepRole = std::tuple<uint32_t, IOType, uint32_t>;
    566  using StepRoleCallback = std::function<void(const RuntimePreparedModel*, IOType, uint32_t)>;
    749  MemoryPreference getMemoryPreference(IOType type, uint32_t index) const;
|
D | ExecutionPlan.cpp |
    220   void addRole(const ExecutionStep& step, uint32_t operandIndex, IOType type,  in addRole()
    1012  analyzer.addRole(*step, sourceIndex, IOType::INPUT, i);  in findMemoryStepRoles()
    1017  analyzer.addRole(*step, sourceIndex, IOType::OUTPUT, i);  in findMemoryStepRoles()
    2005  callback(mPreparedModel.get(), IOType::INPUT, index);  in forEachStepRoleOfInput()
    2010  callback(mPreparedModel.get(), IOType::OUTPUT, index);  in forEachStepRoleOfOutput()
    2038  MemoryPreference ExecutionPlan::getMemoryPreference(IOType type, uint32_t index) const {  in getMemoryPreference()
    2043  const auto sourceOperandIndex = type == IOType::INPUT ? getInputSourceOperand(index)  in getMemoryPreference()
    2053  index, [&alignment, &padding](const auto* preparedModel, IOType, uint32_t) {  in getMemoryPreferenceOfSourceOperand() argument
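The match at 2053 suggests the plan answers a memory-preference query by walking the step roles of the source operand and combining what each prepared model reports. A speculative sketch of that merge, with MemoryPreference, RuntimePreparedModel, and its accessor all stand-ins I am assuming for illustration:

    // Sketch of merging a memory preference across the step roles of one source
    // operand: the callback sees each (preparedModel, IOType, index) role and
    // keeps the strictest (largest) alignment and padding. Stand-in types only.
    #include <algorithm>
    #include <cstdint>

    enum class IOType { INPUT, OUTPUT };
    struct MemoryPreference {
        uint32_t alignment;
        uint32_t padding;
    };
    struct RuntimePreparedModel {
        // Assumed accessor; the real class exposes the driver's preference.
        MemoryPreference getMemoryPreference() const { return {64, 64}; }
    };

    template <typename ForEachRole>
    MemoryPreference mergePreference(const ForEachRole& forEachStepRole) {
        uint32_t alignment = 1, padding = 1;
        forEachStepRole([&alignment, &padding](const RuntimePreparedModel* preparedModel, IOType,
                                               uint32_t) {
            const auto pref = preparedModel->getMemoryPreference();
            alignment = std::max(alignment, pref.alignment);
            padding = std::max(padding, pref.padding);
        });
        return {alignment, padding};
    }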
|
D | ExecutionBuilder.cpp |
    231  if (!memory->getValidator().validate(mCompilation, IOType::INPUT, index, type, offset,  in setInputFromMemory()
    312  if (!memory->getValidator().validate(mCompilation, IOType::OUTPUT, index, type, offset,  in setOutputFromMemory()
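The memory validator is consulted when a client binds an ANeuralNetworksMemory to an execution input or output; for descriptor-backed device memories, both offset and length must be 0 and the declared roles must match. A usage sketch on the public API, assuming the execution and memories were created elsewhere:

    // Binding a descriptor-backed ANeuralNetworksMemory to execution I/O; this
    // is the path that ends in getValidator().validate(..., IOType::INPUT/OUTPUT, ...).
    // Sketch only: creation of the execution and memories is assumed.
    #include <android/NeuralNetworks.h>

    int bindDeviceMemory(ANeuralNetworksExecution* execution,
                         const ANeuralNetworksMemory* input0,
                         const ANeuralNetworksMemory* output0) {
        // For memories created via ANeuralNetworksMemory_createFromDesc, offset
        // and length must both be 0; the validator enforces the declared roles.
        int status = ANeuralNetworksExecution_setInputFromMemory(
                execution, /*index=*/0, /*type=*/nullptr, input0, /*offset=*/0, /*length=*/0);
        if (status != ANEURALNETWORKS_NO_ERROR) return status;
        return ANeuralNetworksExecution_setOutputFromMemory(
                execution, /*index=*/0, /*type=*/nullptr, output0, /*offset=*/0, /*length=*/0);
    }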
|
D | NeuralNetworks.cpp |
    953  return mb->addRole(*c, IOType::INPUT, index, frequency);  in ANeuralNetworksMemoryDesc_addInputRole()
    966  return mb->addRole(*c, IOType::OUTPUT, index, frequency);  in ANeuralNetworksMemoryDesc_addOutputRole()
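These two entry points are the public face of MemoryBuilder::addRole: addInputRole records an IOType::INPUT role and addOutputRole an IOType::OUTPUT role on the memory descriptor. A usage sketch of declaring roles and allocating the device memory, assuming both compilations are already finished; error handling is trimmed:

    // Declaring that one device memory will serve as input 0 of compilation1 and
    // output 2 of compilation2; each call lands in MemoryBuilder::addRole with
    // the matching IOType. Sketch only.
    #include <android/NeuralNetworks.h>

    ANeuralNetworksMemory* createSharedDeviceMemory(
            const ANeuralNetworksCompilation* compilation1,
            const ANeuralNetworksCompilation* compilation2) {
        ANeuralNetworksMemoryDesc* desc = nullptr;
        ANeuralNetworksMemory* memory = nullptr;
        if (ANeuralNetworksMemoryDesc_create(&desc) != ANEURALNETWORKS_NO_ERROR) return nullptr;
        // frequency is a hint in (0.0, 1.0] describing how often the role is used.
        ANeuralNetworksMemoryDesc_addInputRole(desc, compilation1, /*index=*/0, /*frequency=*/1.0f);
        ANeuralNetworksMemoryDesc_addOutputRole(desc, compilation2, /*index=*/2, /*frequency=*/1.0f);
        if (ANeuralNetworksMemoryDesc_finish(desc) == ANEURALNETWORKS_NO_ERROR) {
            ANeuralNetworksMemory_createFromDesc(desc, &memory);
        }
        ANeuralNetworksMemoryDesc_free(desc);  // the memory, if created, stays valid
        return memory;
    }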
|
D | ModelBuilder.cpp | 345 if (!memory->getValidator().validate(/*compilation=*/nullptr, /*placeholder*/ IOType::INPUT, in setOperandValueFromMemory()
|
/packages/modules/NeuralNetworks/common/include/nnapi/ |
D | Validation.h |
    68  enum class IOType { INPUT, OUTPUT };  enum
    69  using PreparedModelRole = std::tuple<const IPreparedModel*, IOType, uint32_t>;
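This is the canonical definition: IOType tags a role as an input or an output, and the same tag threads through three different role tuples in this tree. The snippet below shows them side by side with stand-in forward declarations so it compiles on its own; the comments on what each field means are my reading of the matches above, not documentation:

    #include <cstdint>
    #include <tuple>

    enum class IOType { INPUT, OUTPUT };

    class CompilationBuilder;  // runtime compilation object
    class IPreparedModel;      // canonical prepared-model interface

    // runtime/Memory.h: which compilation I/O a memory-descriptor role refers to.
    using CompilationRole = std::tuple<const CompilationBuilder*, IOType, uint32_t>;
    // runtime/ExecutionPlan.h: which partitioned-step I/O a source operand maps to.
    using StepRole = std::tuple<uint32_t, IOType, uint32_t>;
    // common/include/nnapi/Validation.h: which prepared-model I/O a buffer serves.
    using PreparedModelRole = std::tuple<const IPreparedModel*, IOType, uint32_t>;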
|
/packages/modules/NeuralNetworks/common/ |
D | AidlValidateHal.cpp |
    61  const auto [it, success] = roles.emplace(preparedModel.get(), IOType::INPUT, role.ioIndex);  in validateMemoryDesc()
    75  const auto [it, success] = roles.emplace(preparedModel.get(), IOType::OUTPUT, role.ioIndex);  in validateMemoryDesc()
|
D | HalBufferTracker.cpp |
    71  if (kRoles.count({preparedModel, IOType::INPUT, i}) == 0) {  in validateRequest()
    99  if (kRoles.count({preparedModel, IOType::OUTPUT, i}) == 0) {  in validateRequest()
|
D | AidlBufferTracker.cpp |
    71  if (kRoles.count({preparedModel, IOType::INPUT, i}) == 0) {  in validateRequest()
    99  if (kRoles.count({preparedModel, IOType::OUTPUT, i}) == 0) {  in validateRequest()
|
D | BufferTracker.cpp |
    71  if (kRoles.count({preparedModel, IOType::INPUT, i}) == 0) {  in validateRequest()
    98  if (kRoles.count({preparedModel, IOType::OUTPUT, i}) == 0) {  in validateRequest()
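All three buffer trackers (HIDL, AIDL, and canonical) perform the same check at request time: a pooled device buffer may only appear at a request input or output whose (preparedModel, IOType, index) role was declared when the buffer was allocated. A simplified sketch of that check, with the request and pool plumbing replaced by plain index lists:

    // Simplified validateRequest-style check shared by the buffer trackers above:
    // kRoles holds the roles registered via the memory descriptor at allocation.
    #include <cstdint>
    #include <set>
    #include <tuple>
    #include <vector>

    enum class IOType { INPUT, OUTPUT };
    class IPreparedModel {};  // stand-in
    using PreparedModelRole = std::tuple<const IPreparedModel*, IOType, uint32_t>;

    struct ManagedBuffer {
        std::set<PreparedModelRole> kRoles;

        bool validateUsage(const IPreparedModel* preparedModel,
                           const std::vector<uint32_t>& inputIndexesUsingBuffer,
                           const std::vector<uint32_t>& outputIndexesUsingBuffer) const {
            for (uint32_t i : inputIndexesUsingBuffer) {
                if (kRoles.count({preparedModel, IOType::INPUT, i}) == 0) return false;
            }
            for (uint32_t i : outputIndexesUsingBuffer) {
                if (kRoles.count({preparedModel, IOType::OUTPUT, i}) == 0) return false;
            }
            return true;
        }
    };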
|
D | ValidateHal.cpp |
    892  const auto [it, success] = roles.emplace(preparedModel.get(), IOType::INPUT, role.ioIndex);  in validateMemoryDesc()
    906  const auto [it, success] = roles.emplace(preparedModel.get(), IOType::OUTPUT, role.ioIndex);  in validateMemoryDesc()
|
D | Validation.cpp |
    1288  const auto [it, success] = roles.emplace(preparedModel.get(), IOType::INPUT, role.ioIndex);  in validateMemoryDescImpl()
    1302  const auto [it, success] = roles.emplace(preparedModel.get(), IOType::OUTPUT, role.ioIndex);  in validateMemoryDescImpl()
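The HIDL, AIDL, and canonical memory-descriptor validators all rely on the same idiom: every (preparedModel, IOType, index) triple named by the descriptor must be unique, so a failed set emplace signals that the same role was specified twice. A simplified sketch of that duplicate check; the Role record here is a hypothetical stand-in for the real BufferRole structs, which also carry frequency and dimension checks:

    // Sketch of the duplicate-role check in validateMemoryDescImpl and friends.
    #include <cstdint>
    #include <set>
    #include <string>
    #include <tuple>
    #include <vector>

    enum class IOType { INPUT, OUTPUT };
    class IPreparedModel {};  // stand-in
    using PreparedModelRole = std::tuple<const IPreparedModel*, IOType, uint32_t>;

    struct Role {  // hypothetical simplified role record
        const IPreparedModel* preparedModel;
        uint32_t ioIndex;
    };

    bool validateRoles(const std::vector<Role>& inputRoles, const std::vector<Role>& outputRoles,
                       std::string* error) {
        std::set<PreparedModelRole> roles;
        for (const auto& role : inputRoles) {
            const auto [it, success] = roles.emplace(role.preparedModel, IOType::INPUT, role.ioIndex);
            if (!success) {
                *error = "the same input role is specified twice";
                return false;
            }
        }
        for (const auto& role : outputRoles) {
            const auto [it, success] = roles.emplace(role.preparedModel, IOType::OUTPUT, role.ioIndex);
            if (!success) {
                *error = "the same output role is specified twice";
                return false;
            }
        }
        return true;
    }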
|
/packages/modules/NeuralNetworks/common/include/ |
D | AidlValidateHal.h | 32 using AidlHalPreparedModelRole = std::tuple<const aidl_hal::IPreparedModel*, IOType, uint32_t>;
|
D | ValidateHal.h | 33 using HalPreparedModelRole = std::tuple<const V1_3::IPreparedModel*, IOType, uint32_t>;
|