Searched refs:kPrimSoftmax (Results 1 – 6 of 6) sorted by relevance
/third_party/mindspore/mindspore/core/ops/
  softmax.cc
    84    REGISTER_PRIMITIVE_EVAL_IMPL(Softmax, prim::kPrimSoftmax, SoftmaxInfer, nullptr, true);
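The softmax.cc hit registers SoftmaxInfer as the inference implementation for kPrimSoftmax. The sketch below is a toy analogue only, not MindSpore's API: a name-keyed registry plus a shape-preserving infer function (softmax normalizes along an axis, so its output shape equals its input shape). The registry, Shape alias, and ToySoftmaxInfer are all made up for illustration.

// Minimal sketch only: a toy name -> infer-function registry standing in for
// the real eval-impl map, which uses AbstractBasePtr and is not reproduced here.
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

using Shape = std::vector<int64_t>;
using InferFn = std::function<Shape(const std::vector<Shape> &)>;

// Hypothetical global registry keyed by primitive name.
std::map<std::string, InferFn> &InferRegistry() {
  static std::map<std::string, InferFn> registry;
  return registry;
}

// Softmax preserves the input shape.
Shape ToySoftmaxInfer(const std::vector<Shape> &inputs) { return inputs.at(0); }

// Rough analogue of what a registration macro could expand to.
const bool kSoftmaxRegistered = [] {
  InferRegistry()["Softmax"] = ToySoftmaxInfer;
  return true;
}();

int main() {
  const Shape out = InferRegistry()["Softmax"]({{32, 10}});
  std::cout << out[0] << "x" << out[1] << "\n";  // 32x10
  return 0;
}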
/third_party/mindspore/mindspore/lite/tools/optimizer/fusion/
  multi_head_attention_fusion.cc
    84    auto is_softmax = std::make_shared<CondVar>(std::bind(IsOpType, p1, prim::kPrimSoftmax));   in DefineMPWithMaskPattern()
    217   auto is_softmax = std::make_shared<CondVar>(std::bind(IsOpType, p1, prim::kPrimSoftmax));   in DefineMPWithoutMaskPattern()
  tflite_rel_pos_multi_head_attention_fusion.cc
    133   auto is_softmax = std::make_shared<CondVar>(std::bind(IsOpType, p1, prim::kPrimSoftmax));   in DefinePatterns()
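All three fusion hits build the same one-argument condition by binding kPrimSoftmax into IsOpType (p1 is presumably std::placeholders::_1) and wrapping it in a CondVar pattern variable. Below is a minimal sketch of that binding idiom with simplified stand-in types; Node, Primitive, and CondVar here are illustrative, not the real MindSpore classes.

// Illustrative sketch only: simplified stand-ins for the node, Primitive and
// CondVar types used by the fusion passes.
#include <functional>
#include <iostream>
#include <memory>
#include <string>

struct Primitive { std::string name; };
using PrimitivePtr = std::shared_ptr<Primitive>;

struct Node { PrimitivePtr prim; };  // hypothetical stand-in for a graph node
using NodePtr = std::shared_ptr<Node>;

// Two-argument check: does this node carry the given primitive type?
bool IsOpType(const NodePtr &node, const PrimitivePtr &prim) {
  return node && node->prim && prim && node->prim->name == prim->name;
}

// Hypothetical CondVar analogue: a pattern variable guarded by a node predicate.
struct CondVar {
  explicit CondVar(std::function<bool(const NodePtr &)> cond) : cond_(std::move(cond)) {}
  bool Match(const NodePtr &node) const { return cond_(node); }
  std::function<bool(const NodePtr &)> cond_;
};

int main() {
  const PrimitivePtr kPrimSoftmax = std::make_shared<Primitive>(Primitive{"Softmax"});

  // Binding the primitive argument ahead of time yields the one-argument
  // condition the matcher needs, mirroring std::bind(IsOpType, p1, prim::kPrimSoftmax).
  auto is_softmax = std::make_shared<CondVar>(
      std::bind(IsOpType, std::placeholders::_1, kPrimSoftmax));

  auto softmax_node = std::make_shared<Node>(Node{kPrimSoftmax});
  std::cout << std::boolalpha << is_softmax->Match(softmax_node) << "\n";  // true
  return 0;
}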
/third_party/mindspore/mindspore/ccsrc/backend/optimizer/graph_kernel/
  graph_kernel_expander.cc
    89    {kGPUDevice, OpLevel_1, prim::kPrimSoftmax},   in GetExpandOps()

/third_party/mindspore/mindspore/core/base/
  core_ops.h
    265   inline const PrimitivePtr kPrimSoftmax = std::make_shared<Primitive>("Softmax");   (variable)

/third_party/mindspore/mindspore/lite/tools/optimizer/graph/
  slice_prepose_pass.cc
    1477  if (CheckPrimitiveType(preceed_cnode, prim::kPrimSoftmax)) {   in DoPrepose()
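The core_ops.h hit shows kPrimSoftmax defined once as a shared Primitive named "Softmax", and the slice_prepose_pass.cc hit uses CheckPrimitiveType to ask whether the node feeding a Slice is a Softmax before rewriting the graph. The sketch below is a hedged guess at the shape of such a check under toy types; the real helper's signature and node model are not shown in these hits, so apart from the kPrimSoftmax definition itself every type and name here is hypothetical.

// Hedged sketch: one plausible shape of a CheckPrimitiveType-style helper,
// with a toy operator node that keeps its primitive and its operand nodes.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Primitive { std::string name; };
using PrimitivePtr = std::shared_ptr<Primitive>;

struct CNode {
  PrimitivePtr primitive;
  std::vector<std::shared_ptr<CNode>> operands;
};
using CNodePtr = std::shared_ptr<CNode>;

bool CheckPrimitiveTypeSketch(const CNodePtr &node, const PrimitivePtr &expected) {
  if (node == nullptr || node->primitive == nullptr || expected == nullptr) {
    return false;  // a pass should bail out rather than crash on malformed graphs
  }
  return node->primitive->name == expected->name;
}

int main() {
  // Single shared definition, as in core_ops.h.
  const PrimitivePtr kPrimSoftmax = std::make_shared<Primitive>(Primitive{"Softmax"});

  CNodePtr softmax_node = std::make_shared<CNode>(CNode{kPrimSoftmax, {}});
  CNodePtr slice_node = std::make_shared<CNode>(
      CNode{std::make_shared<Primitive>(Primitive{"Slice"}), {softmax_node}});

  // The prepose pass inspects the node feeding the slice and asks whether it is a Softmax.
  const CNodePtr &preceed_cnode = slice_node->operands.front();
  std::cout << std::boolalpha
            << CheckPrimitiveTypeSketch(preceed_cnode, kPrimSoftmax) << "\n";  // true
  return 0;
}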