/third_party/mindspore/mindspore/ccsrc/pipeline/jit/static_analysis/

  auto_monad.cc
    in GetRefInputs() (cnode: argument):
      111  RefInputs GetRefInputs(const CNodePtr &cnode) {
      113  MS_EXCEPTION_IF_NULL(cnode);
      114  for (size_t i = 1; i < cnode->size(); ++i) {
      115  auto &input = cnode->inputs().at(i);
    in HasRefInput() (cnode: argument):
      124  bool HasRefInput(const CNodePtr &cnode) {
      125  if (cnode == nullptr || cnode->inputs().empty()) {
      128  auto &inputs = cnode->inputs();
    in GetPrimitive() (cnode: argument):
      152  PrimitivePtr GetPrimitive(const CNodePtr &cnode) {
      153  if (cnode == nullptr || cnode->inputs().empty()) {
      156  return GetPrimitive(cnode->input(0));
    [all …]

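The auto_monad.cc hits above all follow the same defensive access pattern: null-check the CNode, then walk its inputs starting at index 1, because input 0 holds the primitive rather than a data operand. Below is a minimal, self-contained sketch of that pattern only; CNode, AnfNode, and RefInputs are simplified stand-ins rather than the real MindSpore IR classes, and the is_ref flag is a hypothetical placeholder for whatever ref-key test the original performs.

    // Simplified stand-ins; the real types live in MindSpore's IR headers.
    #include <cassert>
    #include <map>
    #include <memory>
    #include <vector>

    struct AnfNode { bool is_ref = false; };        // hypothetical ref marker
    using AnfNodePtr = std::shared_ptr<AnfNode>;

    struct CNode {
      std::vector<AnfNodePtr> inputs_;              // inputs_[0] is the primitive
      const std::vector<AnfNodePtr> &inputs() const { return inputs_; }
      size_t size() const { return inputs_.size(); }
    };
    using CNodePtr = std::shared_ptr<CNode>;

    // Ref input -> positions where it occurs; index 0 is skipped on purpose.
    using RefInputs = std::map<AnfNodePtr, std::vector<size_t>>;

    RefInputs GetRefInputs(const CNodePtr &cnode) {
      RefInputs ref_inputs;
      assert(cnode != nullptr);                     // stands in for MS_EXCEPTION_IF_NULL
      for (size_t i = 1; i < cnode->size(); ++i) {
        auto &input = cnode->inputs().at(i);
        if (input != nullptr && input->is_ref) {
          ref_inputs[input].push_back(i);
        }
      }
      return ref_inputs;
    }

    bool HasRefInput(const CNodePtr &cnode) {
      if (cnode == nullptr || cnode->inputs().empty()) {
        return false;
      }
      return !GetRefInputs(cnode).empty();
    }
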
/third_party/mindspore/mindspore/ccsrc/backend/optimizer/ascend/format_type/

  deal_ref_and_split_unsupported_transdata.cc
    in FindRefOriginNode() (cnode: local):
      35  auto cnode = cur_node->cast<CNodePtr>();
      36  MS_EXCEPTION_IF_NULL(cnode);
      37  std::string op_name = AnfAlgo::GetCNodeName(cnode);
      38  auto op_info = mindspore::kernel::tbe::TbeDynamicShapeUtil::FindOp(op_name, cnode);
      44  if (in_index > cnode->inputs().size()) {
      45  … MS_LOG(EXCEPTION) << "ref op has wrong inputs: op inputs num is " << cnode->inputs().size()
      48  AnfNodePtr next_node = cnode->input(in_index + 1);
      55  … op_name == prim::kPrimReshape->name() || op_name == kTransDataOpName || opt::IsNopNode(cnode)) {
      56  AnfNodePtr next_node = cnode->input(1);
    in AddRefNodePairToKernelGraph() (cnode: argument):
      65  … const CNodePtr &cnode, const size_t output_index,
    [all …]

  insert_cast.cc
    in InsertCastForMultipleOutput() (cnode: argument):
      34  const CNodePtr &cnode) {
      36  MS_EXCEPTION_IF_NULL(cnode);
      41  manager->SetEdge(update_state.first, update_state.second, cnode);
      47  size_t out_num = AnfAlgo::GetOutputTensorNum(cnode);
      50  const auto origin_shape = AnfAlgo::GetOutputDetailShape(cnode, output_idx);
      51  const auto origin_type = AnfAlgo::GetOutputInferDataType(cnode, output_idx);
      56  auto getitem = func_graph->NewCNode({NewValueNode(prim::kPrimTupleGetItem), cnode, idx});
      57  auto abs = cnode->abstract()->cast<abstract::AbstractTuplePtr>();
      62  const auto dev_fmt = AnfAlgo::GetOutputFormat(cnode, output_idx);
      63  const auto device_type = AnfAlgo::GetOutputDeviceDataType(cnode, output_idx);
    [all …]

  set_fracz_group_attr.cc
    in GetOutputItem() (cnode: argument):
      37  AnfNodePtr GetOutputItem(const FuncGraphManagerPtr &manager, const CNodePtr &cnode, int64_t groups,
      39  if (AnfAlgo::GetOutputTensorNum(cnode) == 1) {
      40  return cnode;
      42  std::vector<AnfNodePtr> depend_nodes{cnode};
    in HasFraczGroupAttrAndSet() (cnode: local):
      74  auto cnode = node->cast<CNodePtr>();
      75  auto node_name = AnfAlgo::GetCNodeName(cnode);
      81  if (AnfAlgo::HasNodeAttr(kAttrFracZGroupIdx, cnode)) {
      82  auto fz_group_idx = AnfAlgo::GetNodeAttr<std::vector<int64_t>>(cnode, kAttrFracZGroupIdx);
      87  AnfAlgo::SetNodeAttr(kAttrFracZGroupIdx, MakeValue(fz_group_idx), cnode);
      90  AnfAlgo::SetNodeAttr(kAttrFracZGroupIdx, MakeValue(std::vector<int64_t>{index_l}), cnode);
    [all …]

/third_party/mindspore/mindspore/ccsrc/backend/optimizer/pass/

  optimize_dependence.cc
    in CreateNewDependNode() (cnode: argument):
      34  CNodePtr CreateNewDependNode(const FuncGraphPtr &func_graph, const CNodePtr &cnode,
      37  MS_EXCEPTION_IF_NULL(cnode);
      42  new_depend->set_abstract(cnode->abstract());
      43  new_depend->set_scope(cnode->scope());
      46  auto new_depend = kernel_graph->NewCNode(cnode);
    in CheckIsolatedVirtualNode() (cnode: argument):
      52  CNodePtr CheckIsolatedVirtualNode(const CNodePtr &cnode) {
      53  MS_EXCEPTION_IF_NULL(cnode);
      54  if (AnfAlgo::GetCNodeName(cnode) != prim::kPrimDepend->name() &&
      55  AnfAlgo::GetCNodeName(cnode) != prim::kPrimLoad->name()) {
      58  auto virtual_input_op = AnfAlgo::GetInputNode(cnode, kIsolatedDependVirtualInputIndex);
    [all …]

  replace_node_by_proxy.cc
    in GenerateKernelBuildInfo() (cnode: argument):
      25  kernel::KernelBuildInfoPtr ReplaceNodeByProxy::GenerateKernelBuildInfo(const CNodePtr &cnode) {
      26  MS_EXCEPTION_IF_NULL(cnode);
      33  size_t input_num = AnfAlgo::GetInputTensorNum(cnode);
      35  inputs_device_format.push_back(AnfAlgo::GetInputFormat(cnode, input_index));
      36  inputs_device_type.push_back(AnfAlgo::GetInputDeviceDataType(cnode, input_index));
      38  size_t output_num = AnfAlgo::GetOutputTensorNum(cnode);
      40  outputs_device_format.push_back(AnfAlgo::GetOutputFormat(cnode, output_index));
      41  outputs_device_type.push_back(AnfAlgo::GetOutputDeviceDataType(cnode, output_index));
      42  outputs_shape.push_back(AnfAlgo::GetOutputInferShape(cnode, output_index));
      44  builder.SetFusionType(AnfAlgo::GetFusionType(cnode));
    [all …]

/third_party/mindspore/mindspore/ccsrc/runtime/device/ascend/executor/

  ai_cpu_dynamic_kernel.cc
    in Execute() (cnode: local):
      49  auto cnode = cnode_ptr_.lock();
      50  MS_EXCEPTION_IF_NULL(cnode);
      51  MS_LOG(INFO) << "Execute AiCpuDynamicKerenl Start, op name: " << cnode->fullname_with_scope();
    in Initialize() (cnode: local):
      62  auto cnode = cnode_ptr_.lock();
      63  MS_EXCEPTION_IF_NULL(cnode);
      64  MS_LOG(INFO) << "Initialize node:" << cnode->fullname_with_scope();
      67  input_num_ = AnfAlgo::GetInputTensorNum(cnode);
      68  output_num_ = AnfAlgo::GetOutputTensorNum(cnode);
      71  auto op_name = AnfAlgo::GetCNodeName(cnode);
      79  …std::make_shared<AicpuExtInfoHandler>(cnode->fullname_with_scope(), input_num_, output_num_, shape…
    [all …]

  hccl_dynamic_kernel.cc
    in UpdateArgs() (cnode: local):
      39  auto cnode = cnode_ptr_.lock();
      40  MS_EXCEPTION_IF_NULL(cnode);
      41  MS_LOG(INFO) << "Start to UpdateArgs. Node info: " << cnode->DebugString();
      42  auto kernel_mod = AnfAlgo::GetKernelMod(cnode);
      48  …KernelRuntime::GenLaunchArgs(*kernel_mod, cnode, &kernel_inputs, &kernel_workspaces, &kernel_outpu…
      50  MS_LOG(EXCEPTION) << "Inputs or outputs is empty. Node info: " << cnode->DebugString();
      62  if (!HcomUtil::GetKernelInputShape(cnode, &hccl_kernel_input_shape_list)) {
      63  MS_LOG(EXCEPTION) << "GetKernelInputShape fail! Node info: " << cnode->DebugString();
      67  if (!HcomUtil::GetHcomDataType(cnode, &hccl_data_type_list)) {
      68  MS_LOG(EXCEPTION) << "GetHcomDataType fail! Node info: " << cnode->DebugString();
    [all …]

  ai_core_dynamic_kernel.cc
    in Execute() (cnode: local):
      46  auto cnode = cnode_ptr_.lock();
      47  MS_EXCEPTION_IF_NULL(cnode);
      48  auto node_info = cnode->fullname_with_scope();
      65  MS_LOG(INFO) << "Execute node:" << cnode->fullname_with_scope() << " success.";
    in ParseCompileJson() (cnode: local):
      69  auto cnode = cnode_ptr_.lock();
      70  MS_EXCEPTION_IF_NULL(cnode);
      71  if (!AnfAlgo::IsDynamicShape(cnode)) {
      78  if (!AnfAlgo::HasNodeAttr(kAttrCompileInfo, cnode)) {
      79  MS_LOG(EXCEPTION) << "Get compile info failed. node name: " << AnfAlgo::GetCNodeName(cnode);
      81  op_compile_info_ = AnfAlgo::GetNodeAttr<std::string>(cnode, kAttrCompileInfo);
    [all …]

/third_party/mindspore/mindspore/lite/tools/optimizer/graph/

  transpose_strategy.cc
    in GetPostNodes() (cnode: argument):
      39  STATUS GetPostNodes(const FuncGraphPtr &func_graph, const CNodePtr &cnode, std::vector<AnfNodePtr> …
      40  MS_ASSERT(func_graph != nullptr && cnode != nullptr && out_nodes != nullptr);
      49  auto node_users = manager->node_users()[cnode];
    in JudgeIs4DInput() (cnode: argument):
      59  bool JudgeIs4DInput(NodeInferShape *node_infer_shape, const CNodePtr &cnode) {
      60  MS_ASSERT(node_infer_shape != nullptr && cnode != nullptr);
      61  auto shape = node_infer_shape->GetInputShape(cnode, 1);
      63  if (cnode->size() > kInputSizeTwo) {
      64  shape = node_infer_shape->GetInputShape(cnode, kInputIndexTwo);
    in TransformAttrByAxes() (cnode: argument):
      93  int TransformAttrByAxes(const FuncGraphPtr &func_graph, const CNodePtr &cnode, size_t input_index,
      96  MS_ASSERT(func_graph != nullptr && cnode != nullptr && node_infer_shape != nullptr);
    [all …]

  redundant_op_remove_pass.cc
    in ProcessInputIsMonad() (cnode: argument):
      31  int ProcessInputIsMonad(const FuncGraphPtr &func_graph, const CNodePtr &cnode) {
      32  MS_ASSERT(func_graph != nullptr && cnode != nullptr);
      33  auto first_input = cnode->input(1);
      36  first_input = cnode->input(1)->cast<CNodePtr>()->input(1);
      39  auto second_input = cnode->input(kInputIndexTwo);
      42  second_input = cnode->input(kInputIndexTwo)->cast<CNodePtr>()->input(1);
      69  manager->Replace(cnode, must_monad);
      71  manager->Replace(cnode, not_must_monad);
    in ProcessDependencyWithTwoNodes() (cnode: argument):
      76  int ProcessDependencyWithTwoNodes(const FuncGraphPtr &func_graph, const CNodePtr &cnode, bool pre_n…
      77  MS_ASSERT(func_graph != nullptr && cnode != nullptr);
    [all …]

  decrease_transpose_algo.cc
    in SetTransType() (cnode: local):
      105  for (auto &cnode : cnodes) {
      107  …if (!CheckPrimitiveType(cnode, prim::kPrimTranspose) || GetTransposePerm(cnode, &perm) != lite::RE…
    in ConvertTensorToNCOrNH() (cnode: argument):
      149  int ConvertTensorToNCOrNH(const FuncGraphPtr &func_graph, const CNodePtr &cnode, size_t index, FmkT…
      151  MS_ASSERT(cnode != nullptr);
      152  if (utils::isa<CNodePtr>(cnode->input(index))) {
      157  if (utils::isa<ParameterPtr>(cnode->input(index))) {
      158  auto input_node = cnode->input(index)->cast<ParameterPtr>();
      163  status = lite::FetchDataFromParameterNode(cnode, index, fmk_type, train_flag, &data_info);
      165  status = lite::FetchDataFromValueNode(cnode, index, fmk_type, train_flag, &data_info);
      192  param_node->set_name(cnode->input(index)->fullname_with_scope());
    [all …]

  node_infershape.cc
    in RectifyFormat():
      47  MS_ASSERT(cnode != nullptr);
    in JudgeOpSupportInfer() (cnode: argument):
      71  bool NodeInferShape::JudgeOpSupportInfer(const CNodePtr &cnode) {
      72  MS_ASSERT(cnode != nullptr);
      73  if (CheckPrimitiveType(cnode, prim::kPrimCustom)) {
      76  auto prim_t = lite::GetPrimitiveT(cnode->input(0));
    in InferShape() (cnode: argument):
      89  STATUS NodeInferShape::InferShape(const CNodePtr &cnode) {
      90  MS_ASSERT(cnode != nullptr);
      91  auto anf_prim = GetValueNode<std::shared_ptr<Primitive>>(cnode->input(0));
      99  if (GetCNodeInputTensors(cnode, &inputs) != lite::RET_OK) {
      104  if (GetCNodeOutputTensors(cnode, &outputs) != lite::RET_OK) {
    [all …]

/third_party/mindspore/mindspore/lite/tools/converter/quantizer/

  weight_quantizer.cc
    in DoConvQuantize() (cnode: argument):
      88  STATUS WeightQuantizer::DoConvQuantize(const CNodePtr &cnode) {
      89  auto primitive = GetValueNode<PrimitivePtr>(cnode->input(0));
      95  auto input_node = cnode->input(2);
      110  …MS_LOG(WARNING) << cnode->fullname_with_scope() << " weight data type is not fp32 but " << tensor_…
    in DoMulQuantize() (cnode: argument):
      139  STATUS WeightQuantizer::DoMulQuantize(const CNodePtr &cnode) {
      140  MS_ASSERT(cnode != nullptr);
      141  for (size_t i = 1; i < cnode->size(); i++) {
      142  auto inputNode = cnode->input(i);
      149  auto primitive = GetValueNode<PrimitivePtr>(cnode->input(0));
      174  MS_LOG(ERROR) << cnode->fullname_with_scope() << " input " << i
    [all …]

/third_party/mindspore/mindspore/lite/tools/converter/parser/onnx/

  onnx_inputs_adjust.cc
    in AddAttrToInput() (cnode: argument):
      30  STATUS AddAttrToInput(const FuncGraphPtr &func_graph, const CNodePtr &cnode, int input_num,
      33  MS_ASSERT(cnode != nullptr);
      34  if (!opt::CheckInputs(cnode)) {
      38  auto primitive_c = GetValueNode<PrimitiveCPtr>(cnode->input(0));
      42  auto inputs = cnode->inputs();
      56  …opt::BuildIntVecParameterNode(func_graph, value_data, cnode->fullname_with_scope() + "_" + attr_na…
      61  tr.AddEdge(cnode, param_node);
    in ReplaceConstant() (cnode: argument):
      126  STATUS ReplaceConstant(const FuncGraphPtr &func_graph, const CNodePtr &cnode) {
      128  MS_ASSERT(cnode != nullptr);
      129  if (cnode->inputs().empty() || cnode->input(0) == nullptr) {
    [all …]

/third_party/mindspore/mindspore/ccsrc/runtime/hccl_adapter/

  converter.cc
    in ConvertAttr() (cnode: argument):
      87  static T ConvertAttr(const CNodePtr &cnode, const ge::OpDescPtr &ge_op, const std::string &anf_attr…
      89  MS_EXCEPTION_IF_NULL(cnode);
      91  if (!AnfAlgo::HasNodeAttr(anf_attr_name, cnode)) {
      92  … MS_LOG(INFO) << "Node " << cnode->DebugString() << " has no attr " << anf_attr_name << ", skip.";
      97  auto attr = AnfAlgo::GetNodeAttr<T>(cnode, anf_attr_name);
      107  …MS_LOG(EXCEPTION) << "Set attr " << ge_attr_name << " for ge node of " << cnode->DebugString() << …
    in SetAllToAllvAttr() (cnode: argument):
      125  static void SetAllToAllvAttr(const CNodePtr &cnode, const ge::OpDescPtr &ge_op, const std::string &…
      126  MS_EXCEPTION_IF_NULL(cnode);
      128  if (!IsPrimitiveCNode(cnode, prim::kPrimAllToAllv)) {
      136  mindspore::hccl::AllToAllvCalcParam calc(cnode, rank_size);
    [all …]

/third_party/mindspore/mindspore/lite/tools/converter/parser/

  unify_format.cc
    in DecideMINDIRConvWeightSrcFormat() (cnode: argument):
      36  STATUS DecideMINDIRConvWeightSrcFormat(const CNodePtr &cnode, schema::Format *src_format) {
      37  MS_ASSERT(cnode != nullptr && src_format != nullptr);
      38  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      50  MS_LOG(ERROR) << "cnode format is invalid. " << cnode->fullname_with_scope();
    in DecideTFConvWeightSrcFormat() (cnode: argument):
      56  STATUS DecideTFConvWeightSrcFormat(const CNodePtr &cnode, schema::Format *src_format) {
      57  MS_ASSERT(cnode != nullptr && src_format != nullptr);
      58  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      64  if (opt::CheckPrimitiveType(cnode, prim::kPrimConv2DFusion)) {
      70  } else if (opt::CheckPrimitiveType(cnode, prim::kPrimConv2dTransposeFusion) && !is_depth_wise) {
      73  MS_LOG(ERROR) << "depthwise-conv2dTranspose need to check. " << cnode->fullname_with_scope();
    [all …]

  inputs_adjust.cc
    in AddAttrToInput() (cnode: argument):
      29  STATUS InputAdjust::AddAttrToInput(const FuncGraphPtr &func_graph, const CNodePtr &cnode, int input…
      31  MS_ASSERT(cnode != nullptr);
      33  if (!opt::CheckInputs(cnode)) {
      37  MS_CHECK_TRUE_MSG(cnode->input(0) != nullptr, RET_ERROR, "cnode->input(0) is nullptr");
      38  auto primitive_c = GetValueNode<PrimitiveCPtr>(cnode->input(0));
      45  if (static_cast<int>(cnode->size()) > input_num) {
      47  MS_LOG(DEBUG) << "input num has been meet, which is " << cnode->size();
      49  } else if (static_cast<int>(cnode->size()) < input_num) {
      59  …opt::BuildIntValueParameterNode(func_graph, value_data, cnode->fullname_with_scope() + "_" + attr_…
      65  …opt::BuildIntVecParameterNode(func_graph, value_data, cnode->fullname_with_scope() + "_" + attr_na…
    [all …]

/third_party/mindspore/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/

  add_input_to_output.cc
    in GetInputOrOutputNames() (cnode: argument):
      27  void GetInputOrOutputNames(const CNodePtr &cnode, const std::string &attr_name, std::vector<std::st…
      29  auto primitive = AnfAlgo::GetCNodePrimitive(cnode);
    in AddOutputs() (cnode: argument):
      38  void AddOutputs(const CNodePtr &cnode, const std::vector<size_t> &input_indices) {
      39  MS_EXCEPTION_IF_NULL(cnode);
      41  GetInputOrOutputNames(cnode, kAttrInputNames, &input_names_vec);
      43  GetInputOrOutputNames(cnode, kAttrOutputNames, &output_names_vec);
      45  auto origin_abstract = cnode->abstract();
      58  if (index + 1 >= cnode->inputs().size()) {
      60  << "node: " << cnode->DebugString();
      63  auto node_to_output = cnode->input(index + 1);
    [all …]

  layer_norm_beta_gamma_backprop_fusion.cc
    in CheckKernelBuildInfo() (cnode: argument):
      55  bool CheckKernelBuildInfo(const CNodePtr &cnode, const kernel::KernelBuildInfoPtr &kernel_info) {
      56  MS_EXCEPTION_IF_NULL(cnode);
      60  kernel_info->GetInputFormat(i) != AnfAlgo::GetInputFormat(cnode, i)) {
      66  kernel_info->GetOutputFormat(i) != AnfAlgo::GetOutputFormat(cnode, i)) {
    in CheckLayernormBetaGammaBackprop() (cnode: argument):
      73  bool CheckLayernormBetaGammaBackprop(const FuncGraphPtr &func_graph, const CNodePtr &cnode,
      75  MS_EXCEPTION_IF_NULL(cnode);
      76  if (!AnfAlgo::HasNodeAttr(kAttrShapeGamma, cnode)) {
      77  MS_LOG(INFO) << "The node " << cnode->DebugString() << " has no " << kAttrShapeGamma << " attr";
      80  if (AnfAlgo::GetInputTensorNum(cnode) != kLayerNormBetaGammaBackpropInputTensorNum) {
      81  MS_LOG(INFO) << "The node " << cnode->DebugString() << " inputs num is not equal to "
    [all …]

/third_party/mindspore/mindspore/ccsrc/frontend/optimizer/ad/

  kpynative.cc
      268  bool KPynativeOp(const CNodePtr &cnode, const ValuePtrList &op_args, const ValuePtr &out);
      269  bool KPynativeWithBProp(const CNodePtr &cnode, const ValuePtrList &op_args, const ValuePtr &out,
      291  PynativeAdjointPtr ForgeCNodeAdjoint(const CNodePtr &cnode);
      292  PynativeAdjointPtr ForgeGetItemAdjoint(const CNodePtr &cnode);
      293  PynativeAdjointPtr ForgeMakeSequenceAdjoint(const CNodePtr &cnode);
      294  bool BuildAdjoint(const CNodePtr &cnode, const ValuePtrList &op_args, const ValuePtr &out,
      297  void BuildAdjointForInput(const CNodePtr &cnode, const ValuePtrList &op_args);
      305  …bool BackPropagateOneCNodeWithBPropFuncGraph(const CNodePtr &cnode, const PynativeAdjointPtr &adjo…
      307  …bool BackPropagateOneCNodeWithFPropFuncGraph(const CNodePtr &cnode, const PynativeAdjointPtr &adjo…
      312  …const AnfNodePtrList BuildKNodeListFromPrimalCNode(const CNodePtr &cnode, const PynativeAdjointPtr…
    [all …]

/third_party/mindspore/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/

  depthwiseconv_eltwise_fusion_pass.cc
    in MatchDepthwiseConvRelu() (cnode: argument):
      26  void DepthwiseConvEltwiseFusionPass::MatchDepthwiseConvRelu(const CNodePtr &cnode,
      29  MS_EXCEPTION_IF_NULL(cnode);
      33  auto depthwise_conv = cnode->input(kIndex1);
      35  if (cnode->isa<CNode>() && IsPrimitiveCNode(depthwise_conv, prim::kPrimDepthwiseConv2dNative)) {
      36  std::unordered_set<AnfNodePtr> record{cnode, depthwise_conv};
      42  auto relu = cnode->input(kIndex1);
      44  …if (cnode->isa<CNode>() && (IsPrimitiveCNode(relu, prim::kPrimRelu) || IsPrimitiveCNode(relu, prim…
      45  std::unordered_set<AnfNodePtr> record{cnode, relu};
    in MatchSingleFusionPattern() (cnode: local):
      61  auto cnode = node->cast<CNodePtr>();
      62  MS_EXCEPTION_IF_NULL(cnode);
    [all …]

/third_party/mindspore/mindspore/ccsrc/runtime/device/executor/

  dynamic_kernel.cc
    in Initialize() (cnode: local):
      33  auto cnode = cnode_ptr_.lock();
      34  MS_EXCEPTION_IF_NULL(cnode);
      35  is_dynamic_shape_ = AnfAlgo::IsDynamicShape(cnode);
      37  MS_LOG(DEBUG) << "cnode is not dynamic shape:" << cnode->fullname_with_scope();
      41  is_input_dynamic_shape_ = AnfAlgo::GetBooleanAttr(cnode, kAttrInputIsDynamicShape);
      42  is_output_dynamic_shape_ = AnfAlgo::GetBooleanAttr(cnode, kAttrOutputIsDynamicShape);
      44  auto ret = abstract::GetDependsFormMap(cnode);
    in RebuildDependTensor() (cnode: local):
      59  auto cnode = cnode_ptr_.lock();
      60  MS_EXCEPTION_IF_NULL(cnode);
      64  auto pre_node_with_index = AnfAlgo::GetPrevNodeOutput(cnode, depend);
    [all …]

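The dynamic_kernel.cc hits above, like the Ascend dynamic-kernel classes listed earlier, open each method the same way: they promote the stored weak reference with cnode_ptr_.lock() and immediately null-check the result, since the kernel does not own the node. A minimal, self-contained sketch of that idiom follows; CNode here is a simplified stand-in for the MindSpore class, and the scope name is purely illustrative.

    #include <iostream>
    #include <memory>
    #include <stdexcept>
    #include <string>

    struct CNode {
      std::string fullname_with_scope() const { return "Default/Add-op0"; }  // illustrative name
    };
    using CNodePtr = std::shared_ptr<CNode>;

    class DynamicKernelSketch {
     public:
      explicit DynamicKernelSketch(const CNodePtr &cnode) : cnode_ptr_(cnode) {}

      void Initialize() {
        auto cnode = cnode_ptr_.lock();        // promote the weak reference
        if (cnode == nullptr) {                // stands in for MS_EXCEPTION_IF_NULL
          throw std::runtime_error("cnode has been destroyed");
        }
        std::cout << "Initialize node:" << cnode->fullname_with_scope() << std::endl;
      }

     private:
      std::weak_ptr<CNode> cnode_ptr_;         // weak: the graph, not the kernel, owns the node
    };

    int main() {
      auto node = std::make_shared<CNode>();
      DynamicKernelSketch kernel(node);
      kernel.Initialize();
      return 0;
    }
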
/third_party/mindspore/mindspore/lite/tools/optimizer/format/

  to_format_base.cc
    in GenNewInput() (cnode: argument):
      29  STATUS ToFormatBase::GenNewInput(const FuncGraphPtr &func_graph, const CNodePtr &cnode, const std::…
      31  MS_ASSERT(func_graph != nullptr && cnode != nullptr);
      32  AnfNodePtr trans_input = before ? cnode->input(index) : cnode;
      33  …std::string trans_name = before ? cnode->fullname_with_scope() + "_pre_" + std::to_string(index - …
      34  : cnode->fullname_with_scope() + "_post";
      58  manager_->SetEdge(cnode, index, trans_cnode);
      60  if (!manager_->Replace(cnode, trans_cnode)) {
    in ModifyCNode() (cnode: argument):
      68  STATUS ToFormatBase::ModifyCNode(const CNodePtr &cnode) {
      69  MS_ASSERT(cnode != nullptr);
      70  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    [all …]

/third_party/mindspore/mindspore/lite/tools/converter/parser/tflite/

  tflite_inputs_adjust.cc
    in CheckResize() (cnode: argument):
      40  bool CheckResize(const CNodePtr &cnode) {
      41  if (!opt::CheckPrimitiveType(cnode, prim::kPrimResize)) {
      44  auto prim_resize = GetValueNode<std::shared_ptr<ops::Resize>>(cnode->input(0));
    in ReorderCnodeInputs() (cnode: argument):
      54  lite::STATUS ReorderCnodeInputs(CNode *cnode, const std::vector<size_t> &perm) {
      56  MSLITE_CHECK_PTR(cnode);
      57  std::vector<AnfNodePtr> new_inputs = {cnode->input(0)};
      58  auto primitive = GetValueNode<PrimitivePtr>(cnode->input(0));
      73  if (idx > cnode->inputs().size() - 1) {
      74  … MS_LOG(ERROR) << "Idx " << idx << " is larger than inputs size: " << (cnode->inputs().size() - 1);
      77  new_inputs.emplace_back(cnode->input(idx));
    [all …]

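The ReorderCnodeInputs hits above illustrate a common converter-pass operation: rebuild a CNode's input list under a permutation while keeping input 0 (the primitive) in place and bounds-checking every index. A minimal, self-contained sketch of that logic follows; CNode and AnfNode are simplified stand-ins for the MindSpore types, and the sketch returns false where the original reports the error through MS_LOG(ERROR).

    #include <iostream>
    #include <memory>
    #include <utility>
    #include <vector>

    struct AnfNode {};
    using AnfNodePtr = std::shared_ptr<AnfNode>;

    struct CNode {
      std::vector<AnfNodePtr> inputs_;
      const std::vector<AnfNodePtr> &inputs() const { return inputs_; }
      AnfNodePtr input(size_t i) const { return inputs_.at(i); }
      void set_inputs(std::vector<AnfNodePtr> inputs) { inputs_ = std::move(inputs); }
    };

    bool ReorderCnodeInputs(CNode *cnode, const std::vector<size_t> &perm) {
      if (cnode == nullptr || cnode->inputs().empty()) {
        return false;
      }
      std::vector<AnfNodePtr> new_inputs = {cnode->input(0)};   // keep the primitive slot
      for (size_t idx : perm) {
        if (idx > cnode->inputs().size() - 1) {                 // reject out-of-range indices
          std::cerr << "Idx " << idx << " is larger than inputs size: "
                    << (cnode->inputs().size() - 1) << std::endl;
          return false;
        }
        new_inputs.emplace_back(cnode->input(idx));
      }
      cnode->set_inputs(std::move(new_inputs));
      return true;
    }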