// Generated file (from: softmax_quant8_2.mod.py). Do not edit
void CreateModel(Model *model) {
  OperandType type1(Type::FLOAT32, {});
  OperandType type2(Type::TENSOR_QUANT8_ASYMM, {2, 5}, 0.00390625f, 0);
  OperandType type0(Type::TENSOR_QUANT8_ASYMM, {2, 5}, 0.5f, 0);
  // Phase 1, operands
  auto input = model->addOperand(&type0);
  auto beta = model->addOperand(&type1);
  auto output = model->addOperand(&type2);
  // Phase 2, operations
  static float beta_init[] = {1.0f};
  model->setOperandValue(beta, beta_init, sizeof(float) * 1);
  model->addOperation(ANEURALNETWORKS_SOFTMAX, {input, beta}, {output});
  // Phase 3, inputs and outputs
  model->identifyInputsAndOutputs(
    {input},
    {output});
  assert(model->isValid());
}

bool is_ignored(int i) {
  static std::set<int> ignore = {};
  return ignore.find(i) != ignore.end();
}
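
// ---------------------------------------------------------------------------
// Note (not part of the generated file): a minimal standalone sketch of how
// the TENSOR_QUANT8_ASYMM parameters declared above map quantized bytes to
// real values via real = scale * (q - zeroPoint). It assumes only the scales
// and zero points from CreateModel; the output scale 1/256 with zero point 0
// represents [0, 255/256], which matches softmax outputs in [0, 1).
#include <cstdint>
#include <cstdio>

static float Dequantize(std::uint8_t q, float scale, int zeroPoint) {
  return scale * (static_cast<int>(q) - zeroPoint);
}

int main() {
  // Input operand (type0): scale = 0.5, zeroPoint = 0.
  std::printf("input  q=32  -> %.2f\n", Dequantize(32, 0.5f, 0));          // 16.00
  // Output operand (type2): scale = 1/256 = 0.00390625, zeroPoint = 0.
  std::printf("output q=255 -> %.6f\n", Dequantize(255, 0.00390625f, 0));  // 0.996094
  return 0;
}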