/third_party/mindspore/mindspore/core/ops/fusion/
arg_max_fusion.cc
  21 void ArgMaxFusion::Init(const bool keep_dims, const bool out_max_value, const int64_t top_k, const … in Init() argument
  23 set_keep_dims(keep_dims); in Init()
  28 void ArgMaxFusion::set_keep_dims(const bool keep_dims) { (void)this->AddAttr(kKeepDims, MakeValue(k… in set_keep_dims() argument
  35 auto keep_dims = GetAttr(kKeepDims); in get_keep_dims() local
  36 MS_EXCEPTION_IF_NULL(keep_dims); in get_keep_dims()
  37 return GetValue<bool>(keep_dims); in get_keep_dims()
arg_min_fusion.cc
  21 void ArgMinFusion::Init(bool keep_dims, bool out_max_value, int64_t top_k, int64_t axis) { in Init() argument
  23 set_keep_dims(keep_dims); in Init()
  28 void ArgMinFusion::set_keep_dims(const bool keep_dims) { (void)this->AddAttr(kKeepDims, MakeValue(k… in set_keep_dims() argument
  33 auto keep_dims = GetAttr(kKeepDims); in get_keep_dims() local
  34 MS_EXCEPTION_IF_NULL(keep_dims); in get_keep_dims()
  35 return GetValue<bool>(keep_dims); in get_keep_dims()
reduce_fusion.cc
  29 void ReduceFusion::set_keep_dims(const bool keep_dims) { (void)this->AddAttr(kKeepDims, MakeValue(k… in set_keep_dims() argument
  66 void ReduceFusion::Init(const bool keep_dims, const ReduceMode mode, const bool reduce_to_end, cons… in Init() argument
  67 this->set_keep_dims(keep_dims); in Init()
arg_min_fusion.h
  44 void Init(bool keep_dims, bool out_max_value, int64_t top_k, int64_t axis = -1);
  49 void set_keep_dims(const bool keep_dims);
arg_max_fusion.h
  44 …void Init(const bool keep_dims, const bool out_max_value, const int64_t top_k, const int64_t axis …
  49 void set_keep_dims(const bool keep_dims);
reduce_fusion.h
  48 void Init(const bool keep_dims = false, const ReduceMode mode = ReduceMode::Reduce_Mean,
  54 void set_keep_dims(const bool keep_dims);
/third_party/mindspore/mindspore/core/ops/
reduce.cc
  37 const bool keep_dims) { in infer_shape_reduce() argument
  42 if (keep_dims) { in infer_shape_reduce()
  59 if (keep_dims) { in infer_shape_reduce()
  75 auto keep_dims = GetValue<bool>(primitive->GetAttr(kKeepDims)); in InferShape() local
  76 auto out_shape = infer_shape_reduce(input_x_shape, axis_value, keep_dims); in InferShape()
  90 void Reduce::set_keep_dims(const bool keep_dims) { (void)this->AddAttr(kKeepDims, MakeValue(keep_di… in set_keep_dims() argument
  94 void Reduce::Init(const bool keep_dims) { this->set_keep_dims(keep_dims); } in Init() argument
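These hits are the core-ops shape inference for Reduce: infer_shape_reduce reads the kKeepDims attribute and either keeps each reduced axis as a size-1 dimension or drops it. A minimal sketch of the semantics that InferShape has to reproduce, shown with NumPy's equivalent keepdims flag (the array and axis below are illustrative, not from the source):

    import numpy as np

    x = np.ones((2, 3, 4), dtype=np.float32)
    print(np.sum(x, axis=1, keepdims=True).shape)   # (2, 1, 4): reduced axis kept as size 1
    print(np.sum(x, axis=1, keepdims=False).shape)  # (2, 4):    reduced axis dropped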
all.cc
  23 void All::Init(const int64_t keep_dims) { this->set_keep_dims(keep_dims); } in Init() argument
  25 void All::set_keep_dims(const int64_t keep_dims) { (void)this->AddAttr(kKeepDims, MakeValue(keep_di… in set_keep_dims() argument
reduce_sum.cc
  94 bool keep_dims = GetValue<bool>(keep_dimis_value_ptr); in InferShape() local
  109 if (axis_shape.size() == 1 && axis_shape[0] == -1 && !keep_dims) { in InferShape()
  115 } else if (!keep_dims) { in InferShape()
  166 InferImplReduceFuncCalShape(&out_shape, input_shape, axis_value, keep_dims); in InferShape()
  171 InferImplReduceFuncCalShape(&shape_min, input_min_shape, axis_value, keep_dims); in InferShape()
  172 InferImplReduceFuncCalShape(&shape_max, input_max_shape, axis_value, keep_dims); in InferShape()
all.h
  40 void Init(const int64_t keep_dims);
  45 void set_keep_dims(const int64_t keep_dims);
reduce.h
  47 void Init(const bool keep_dims = false);
  52 void set_keep_dims(const bool keep_dims);
/third_party/mindspore/mindspore/ccsrc/backend/kernel_compiler/cpu/nnacl/infer/
reduce_infer.c
  20 …xes(const TensorC *input, TensorC *output, int *out_shape, size_t out_shape_size, bool keep_dims) { in ReduceOnAllAxes() argument
  21 if (keep_dims) { in ReduceOnAllAxes()
  32 size_t out_shape_size, bool keep_dims) { in ReduceOnSelectedAxes() argument
  42 if (keep_dims) { in ReduceOnSelectedAxes()
  83 bool keep_dims = param->keep_dims_; in ReduceInferShape() local
  87 return ReduceOnAllAxes(input, output, out_shape, out_shape_size, keep_dims); in ReduceInferShape()
  128 keep_dims = false; in ReduceInferShape()
  132 return ReduceOnAllAxes(input, output, out_shape, out_shape_size, keep_dims); in ReduceInferShape()
  135 …n ReduceOnSelectedAxes(input, num_axes, actual_axes, output, out_shape, out_shape_size, keep_dims); in ReduceInferShape()
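The lite inference kernel splits the same rule into ReduceOnAllAxes (reduce over everything) and ReduceOnSelectedAxes. A hypothetical Python rendering of the shape logic those two helpers compute; the function name and details are illustrative, not the nnacl code:

    def infer_reduce_out_shape(in_shape, axes, keep_dims):
        if not axes:                        # reduce over every axis
            return [1] * len(in_shape) if keep_dims else []
        out = []
        for i, dim in enumerate(in_shape):  # reduce only the selected axes
            if i in axes:
                if keep_dims:
                    out.append(1)           # keep the axis as size 1
            else:
                out.append(dim)
        return out

    assert infer_reduce_out_shape([2, 3, 4], [1], keep_dims=True) == [2, 1, 4]
    assert infer_reduce_out_shape([2, 3, 4], [1], keep_dims=False) == [2, 4]
    assert infer_reduce_out_shape([2, 3, 4], [], keep_dims=False) == []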
mean_infer.c
  36 bool keep_dims = (bool)(param->keep_dims_); in MeanInferShape() local
  43 if (keep_dims) { in MeanInferShape()
  65 if (keep_dims) { in MeanInferShape()
/third_party/mindspore/tests/st/ops/cpu/
test_argminwithvalue_op.py
  28 def __init__(self, axis=0, keep_dims=False): argument
  30 self.argmin = P.ArgMinWithValue(axis=axis, keep_dims=keep_dims)
  44 argmin_a0 = NetArgminWithValue(axis=0, keep_dims=False)
  53 argmin_a0k = NetArgminWithValue(axis=0, keep_dims=True)
  62 argmin_a1 = NetArgminWithValue(axis=1, keep_dims=False)
  71 argmin_a1k = NetArgminWithValue(axis=-1, keep_dims=True)
  89 argmin_a0 = NetArgminWithValue(axis=0, keep_dims=False)
  98 argmin_a0k = NetArgminWithValue(axis=0, keep_dims=True)
  107 argmin_a1 = NetArgminWithValue(axis=1, keep_dims=False)
  116 argmin_a1k = NetArgminWithValue(axis=-1, keep_dims=True)
  [all …]
/third_party/mindspore/mindspore/explainer/
_operators.py
  75 def argmax(inputs: Tensor, axis: int = -1, keep_dims: bool = False) -> Tensor:
  80 if keep_dims:
  86 def argmin(inputs: Tensor, axis: int = -1, keep_dims: bool = False) -> Tensor:
  91 if keep_dims:
  133 def maximum(inputs: Tensor, axis: _Axis = (), keep_dims: bool = False) -> Tensor:
  135 max_op = op.ReduceMax(keep_dims)
  140 def minimum(inputs: Tensor, axis: _Axis = (), keep_dims: bool = False) -> Tensor:
  142 max_op = op.ReduceMin(keep_dims)
  147 def mean(inputs: Tensor, axis: _Axis = (), keep_dims: bool = False) -> Tensor:
  149 mean_op = op.ReduceMean(keep_dims)
  [all …]
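The explainer's argmax/argmin wrappers accept keep_dims even though an argmax primitive always removes the reduced axis; the `if keep_dims:` branches above presumably re-insert that axis. A minimal NumPy sketch of that pattern, assuming that is what the branch does (not the explainer's actual code):

    import numpy as np

    def argmax_keep_dims(x, axis=-1, keep_dims=False):
        idx = np.argmax(x, axis=axis)        # the axis is always reduced away here
        if keep_dims:
            idx = np.expand_dims(idx, axis)  # restore it as a size-1 dimension
        return idx

    x = np.arange(12).reshape(3, 4)
    print(argmax_keep_dims(x, axis=1).shape)                  # (3,)
    print(argmax_keep_dims(x, axis=1, keep_dims=True).shape)  # (3, 1)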
/third_party/mindspore/mindspore/ccsrc/transform/graph_ir/op_declare/
reduce_ops_declare.cc
  59 ATTR_MAP(ReduceAnyD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
  67 ATTR_MAP(ReduceSumD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
  75 ATTR_MAP(ReduceProdD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
  83 ATTR_MAP(ReduceAllD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
  91 ATTR_MAP(ReduceMeanD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
  99 ATTR_MAP(ReduceMinD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
  107 ATTR_MAP(ReduceMaxD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
/third_party/mindspore/tests/st/pynative/
test_pynative_layernorm_input_and_argmaxwithvalue.py
  149 def __init__(self, axis, keep_dims): argument
  151 self.op = op.ArgMaxWithValue(axis=axis, keep_dims=keep_dims)
  162 def __init__(self, input_shape, axis, keep_dims, dtype=np.float32): argument
  168 self.keep_dims = keep_dims
  172 net = ArgMaxWithValue(axis=self.axis, keep_dims=self.keep_dims)
  178 value = np.amax(self.input_np, axis=self.axis, keepdims=self.keep_dims)
  187 net = ArgMaxWithValue(axis=self.axis, keep_dims=self.keep_dims)
  217 fact = ArgMaxWithValueFactory(input_shape=[1024, 1024], axis=-1, keep_dims=False)
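The factory above validates ArgMaxWithValue against NumPy, using np.amax(..., keepdims=...) as the reference for the value output. A hedged usage sketch; the input data is illustrative and the (index, value) output order follows the operator's documented behaviour:

    import numpy as np
    import mindspore.ops.operations as P
    from mindspore import Tensor

    x = np.random.rand(4, 5).astype(np.float32)
    index, value = P.ArgMaxWithValue(axis=-1, keep_dims=True)(Tensor(x))
    # Mirror the factory's NumPy reference for the value output:
    assert value.shape == np.amax(x, axis=-1, keepdims=True).shape  # (4, 1)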
/third_party/mindspore/tests/ut/python/parallel/
test_reduce_method_info.py
  88 self.reduce_sum = P.ReduceSum(keep_dims=False).shard(strategy2)
  115 self.reduce_sum = P.ReduceSum(keep_dims=False).shard(strategy2)
  142 self.reduce_sum = P.ReduceSum(keep_dims=False).shard(strategy2)
  169 self.reduce_sum = P.ReduceSum(keep_dims=True).shard(strategy2)
  196 self.reduce_sum = P.ReduceSum(keep_dims=True).shard(strategy2)
  219 self.reduce_sum = P.ReduceSum(keep_dims=True).shard(strategy2)
  242 self.reduce_sum = P.ReduceSum(keep_dims=True).shard(strategy2)
  265 self.reduce_max = P.ReduceMax(keep_dims=False).shard(strategy2)
  292 self.reduce_min = P.ReduceMin(keep_dims=False).shard(strategy2)
  319 self.reduce_mean = P.ReduceMean(keep_dims=False).shard(strategy2)
  [all …]
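In the auto-parallel UT the reduce primitives carry keep_dims together with a shard strategy. strategy2 itself is not visible in these hits, so the value below is a made-up placeholder; only the construction pattern mirrors the test:

    import mindspore.ops.operations as P

    strategy2 = ((4, 2),)  # hypothetical strategy: split the tensor input across a 4 x 2 device mesh
    reduce_sum = P.ReduceSum(keep_dims=False).shard(strategy2)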
/third_party/mindspore/tests/ut/python/ops/
test_math_ops_check.py
  53 def __init__(self, op_class, keep_dims, axis): argument
  56 self.op = op_class(keep_dims=keep_dims)
  102 'block': (ReduceNet(P.ReduceMean, keep_dims=True, axis=5.0),
  108 'block': (ReduceNet(P.ReduceMean, keep_dims=True, axis=5),
  115 'block': (ReduceNet(P.ReduceSum, keep_dims=True, axis=5.0),
  121 'block': (ReduceNet(P.ReduceSum, keep_dims=True, axis=5),
  128 'block': (ReduceNet(P.ReduceAll, keep_dims=True, axis=5.0),
  134 'block': (ReduceNet(P.ReduceAll, keep_dims=True, axis=5),
  141 'block': (ReduceNet(P.ReduceMax, keep_dims=True, axis=5.0),
  147 'block': (ReduceNet(P.ReduceMax, keep_dims=True, axis=5),
  [all …]
/third_party/mindspore/tests/st/ops/graph_kernel/
test_reduce_max.py
  25 def __init__(self, keep_dims): argument
  27 self.reduce_max = P.ReduceMax(keep_dims)
  33 def get_output(x, axis, keep_dims, enable_graph_kernel=False): argument
  35 net = ReduceMax(keep_dims)
test_reduce_min.py
  25 def __init__(self, keep_dims): argument
  27 self.reduce_min = P.ReduceMin(keep_dims)
  33 def get_output(x, axis, keep_dims, enable_graph_kernel=False): argument
  35 net = ReduceMin(keep_dims)
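Both graph-kernel tests wrap a single reduce primitive in a small Cell and compare outputs with graph-kernel fusion switched on and off. A minimal sketch of that pattern, assuming a standard Cell and the enable_graph_kernel context flag; the test data and axis are illustrative:

    import numpy as np
    import mindspore.context as context
    import mindspore.nn as nn
    import mindspore.ops.operations as P
    from mindspore import Tensor

    class ReduceMax(nn.Cell):
        def __init__(self, keep_dims):
            super().__init__()
            self.reduce_max = P.ReduceMax(keep_dims)

        def construct(self, x, axis):
            return self.reduce_max(x, axis)

    def get_output(x, axis, keep_dims, enable_graph_kernel=False):
        context.set_context(mode=context.GRAPH_MODE, enable_graph_kernel=enable_graph_kernel)
        return ReduceMax(keep_dims)(Tensor(x), axis)

    x = np.random.rand(4, 5, 6).astype(np.float32)
    expect = get_output(x, (1,), True, enable_graph_kernel=False)
    output = get_output(x, (1,), True, enable_graph_kernel=True)
    assert np.allclose(expect.asnumpy(), output.asnumpy())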
/third_party/mindspore/mindspore/lite/test/ut/tools/converter/parser/tflite/
tflite_reduce_parser_test.cc
  38 ASSERT_EQ(val->keep_dims, false); in TEST_F()
  58 ASSERT_EQ(val->keep_dims, false); in TEST_F()
  78 ASSERT_EQ(val->keep_dims, false); in TEST_F()
  99 ASSERT_EQ(val->keep_dims, false); in TEST_F()
  120 ASSERT_EQ(val->keep_dims, true); in TEST_F()
/third_party/mindspore/tests/st/ops/ascend/test_tbe_ops/
test_ReduceMean.py
  27 def __init__(self, keep_dims, axis): argument
  29 self.reduce_mean = P.ReduceMean(keep_dims=keep_dims)
/third_party/mindspore/mindspore/ccsrc/backend/optimizer/ascend/ir_fission/
reduce_min_fission.cc
  83 bool keep_dims) { in GetInferShape() argument
  87 if (keep_dims) { in GetInferShape()
  129 auto keep_dims = AnfAlgo::GetNodeAttr<bool>(cnode, kAttrKeepDims); in Process() local
  139 std::vector<size_t> shape_first = GetInferShape(shape, axis_first, keep_dims); in Process()
/third_party/mindspore/mindspore/_extends/graph_kernel/expanders/
reduce_mean.py
  28 keep_dims = self.attrs['keep_dims']
  40 … sum_x = graph_builder.emit('ReduceSum', [x], attrs={'reduce_axis': axis, 'keep_dims': keep_dims})
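The graph-kernel expander lowers ReduceMean to a ReduceSum that inherits reduce_axis and keep_dims; the remaining step, not visible in these hits, is presumably a multiply by the reciprocal of the number of reduced elements. A NumPy sketch of that decomposition (the division step is an assumption, only the ReduceSum emission is shown above):

    import numpy as np

    def expand_reduce_mean(x, axis, keep_dims):
        sum_x = np.sum(x, axis=axis, keepdims=keep_dims)    # the emitted ReduceSum
        reduced_size = np.prod([x.shape[a] for a in axis])  # elements folded into each output
        return sum_x / reduced_size

    x = np.random.rand(2, 3, 4).astype(np.float32)
    assert np.allclose(expand_reduce_mean(x, (1, 2), True),
                       np.mean(x, axis=(1, 2), keepdims=True), rtol=1e-5)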