
Searched full:shape (Results 1 – 25 of 6841) sorted by relevance


/third_party/mindspore/mindspore-src/source/mindspore/ccsrc/plugin/device/ascend/kernel/aicpu/aicpu_ops/customize/op_proto/utils/
common_shape_fns.h
33 Shape shape_;
43 * Check whether Shape's rank is at least rank
45 * @param rank expect val of Shape
46 * @param out Output Shape
47 * @return status whether Shape's condition Satisfied
49 graphStatus WithRankAtLeast(const TensorDesc &tensor, int64_t rank, Shape &out, const ge::Operator …
52 * Check whether Shape's rank is at least rank
54 * @param rank expect val of Shape
55 * @param out Output Shape
56 * @return status whether Shape's condition Satisfied
[all …]
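The doc comment above states the contract of a rank-checking helper. A minimal Python sketch of that contract (the name with_rank_at_least and the exception type here are illustrative, not the MindSpore/GE API):

    def with_rank_at_least(shape, rank):
        # Succeed (returning the shape as the output shape) only when the
        # shape's rank is at least `rank`, mirroring the contract quoted above.
        if len(shape) < rank:
            raise ValueError(f"expected rank >= {rank}, got rank {len(shape)}")
        return shape

    with_rank_at_least((2, 3, 4), 2)  # ok, returns (2, 3, 4)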
/third_party/typescript/tests/baselines/reference/
typeGuardNarrowsIndexedAccessOfKnownProperty1.types
30 type Shape = Square | Rectangle | Circle;
31 >Shape : Square | Rectangle | Circle
35 >"0" : { sub: { under: { shape: Shape; };}; }
38 >sub : { under: { shape: Shape;}; }
41 >under : { shape: Shape; }
43 shape: Shape;
44 >shape : Shape
49 function area(s: Shape): number {
50 >area : (s: Shape) => number
51 >s : Shape
[all …]
mappedTypes2.types
85 interface Shape {
127 function f0(s1: Shape, s2: Shape) {
128 >f0 : (s1: Shape, s2: Shape) => void
129 >s1 : Shape
130 >s2 : Shape
135 >s1 : Shape
143 >s2 : Shape
151 function f1(shape: Shape) {
152 >f1 : (shape: Shape) => void
153 >shape : Shape
[all …]
mappedTypes2.js
37 interface Shape {
58 function f0(s1: Shape, s2: Shape) { argument
63 function f1(shape: Shape) { argument
65 var frozen: Readonly<Shape>;
66 var frozen = freeze(shape);
69 function f2(shape: Shape) { argument
71 var partial: Partial<Shape>;
72 var partial: Partial<Shape> = {};
75 function f3(shape: Shape) { argument
76 const x = pick(shape, "name", "location"); // { name: string, location: Point }
[all …]
/third_party/mindspore/mindspore-src/source/mindspore/core/ops/ops_def/doc/
broadcast_to_doc.yaml
3 Broadcasts input tensor to a given shape. The dim of input shape must be smaller
4 than or equal to that of target shape. Suppose input shape is :math:`(x_1, x_2, ..., x_m)`,
5 … target shape is :math:`(*, y_1, y_2, ..., y_m)`, where :math:`*` means any additional dimension.
12 … value pairs at a specific dim are equal, then that value goes right into that dim of output shape.
13 …With an input shape :math:`(2, 3)`, target shape :math:`(2, 3)` , the inferred output shape is :ma…
17 Case 1: If the value of the target shape in the dimension is -1, the value of the
18 … output shape in the dimension is the value of the corresponding input shape in the dimension.
19 With an input shape :math:`(3, 3)`, target
20 shape :math:`(-1, 3)`, the output shape is :math:`(3, 3)`.
22 Case 2: If the value of target shape in the dimension is not -1, but the corresponding
[all …]
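As a reference for the two cases quoted above, a small NumPy sketch; numpy.broadcast_to does not accept -1 itself, so the helper below (a hypothetical name, not MindSpore API) resolves -1 against the input shape first:

    import numpy as np

    def broadcast_with_placeholder(x, target):
        # Resolve each -1 in the target shape to the matching input dimension
        # (Case 1 in the doc above; -1 is only valid where the input has a
        # corresponding dimension), then let NumPy broadcast as usual.
        offset = len(target) - x.ndim
        resolved = tuple(x.shape[i - offset] if d == -1 else d
                         for i, d in enumerate(target))
        return np.broadcast_to(x, resolved)

    x = np.ones((3, 3))
    print(broadcast_with_placeholder(x, (-1, 3)).shape)    # (3, 3)
    print(broadcast_with_placeholder(x, (2, 3, 3)).shape)  # (2, 3, 3)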
/third_party/mindspore/mindspore-src/source/mindspore/core/utils/
shape_utils.h
27 inline std::string ShapeVectorToString(const ShapeVector &shape) { in ShapeVectorToString() argument
29 for (auto &item : shape) { in ShapeVectorToString()
36 inline size_t SizeOf(const ShapeVector &shape) { in SizeOf() argument
38 for (auto dim : shape) { in SizeOf()
40 // For dynamic shape which has negative dimensions, data size should be zero. in SizeOf()
44 MS_EXCEPTION(ValueError) << "The product value of shape (" << ShapeVectorToString(shape) in SizeOf()
52 inline bool IsOneElementShape(const ShapeVector &shape) { in IsOneElementShape() argument
53 if (shape.empty()) { in IsOneElementShape()
55 } else if (shape.size() == 1 && shape[0] == 1) { in IsOneElementShape()
72 inline bool IsDynamicRank(const ShapeVector &shape) { in IsDynamicRank() argument
[all …]
/third_party/mindspore/mindspore-src/source/mindspore/ccsrc/plugin/device/gpu/kernel/
gpu_kernel.cc
159 void ShapeNCHW2NHWC(ShapeVector *shape) { in ShapeNCHW2NHWC() argument
160 std::swap((*shape)[kShapeIndex1st], (*shape)[kShapeIndex3rd]); in ShapeNCHW2NHWC()
161 std::swap((*shape)[kShapeIndex2nd], (*shape)[kShapeIndex1st]); in ShapeNCHW2NHWC()
164 void ShapeNCDHW2NDHWC(ShapeVector *shape) { in ShapeNCDHW2NDHWC() argument
165 std::swap((*shape)[kShapeIndex1st], (*shape)[kShapeIndex2nd]); in ShapeNCDHW2NDHWC()
166 std::swap((*shape)[kShapeIndex2nd], (*shape)[kShapeIndex3rd]); in ShapeNCDHW2NDHWC()
167 std::swap((*shape)[kShapeIndex3rd], (*shape)[kShapeIndex4th]); in ShapeNCDHW2NDHWC()
171 void SetDimA(const ShapeVector &shape, int *dimA, size_t len, const std::string &format) { in SetDimA() argument
172 if (shape.size() != len) { in SetDimA()
173 …MS_EXCEPTION(ValueError) << "Invalid size of input shape " << shape.size() << "-D with dimA " << l… in SetDimA()
[all …]
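The two std::swap calls in ShapeNCHW2NHWC above compose to the permutation (0, 2, 3, 1); a quick Python check of that equivalence:

    def nchw_to_nhwc_swaps(shape):
        # Same swap sequence as the C++ snippet: swap dims 1 and 3, then 2 and 1.
        s = list(shape)
        s[1], s[3] = s[3], s[1]
        s[2], s[1] = s[1], s[2]
        return s

    assert nchw_to_nhwc_swaps([8, 3, 32, 64]) == [8, 32, 64, 3]  # N, H, W, C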
/third_party/mindspore/mindspore-src/source/mindspore/lite/src/litert/kernel/opencl/cl/
transpose.cl
6 …transpose_0312_NHWC4(__read_only image2d_t src_data, __write_only image2d_t dst_data, int4 shape) {
10 if (4 * X >= shape.y || Y >= shape.z || 4 * Z >= shape.w) {
13 int H4 = UP_DIV(shape.y, 4);
14 int C4 = UP_DIV(shape.w, 4);
17 if (4 * Z + 1 < shape.w) {
21 if (4 * Z + 2 < shape.w) {
25 if (4 * Z + 3 < shape.w) {
33 if (4 * X + 1 < shape.y) {
36 if (4 * X + 2 < shape.y) {
39 if (4 * X + 3 < shape.y) {
[all …]
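UP_DIV in the kernel above is ceiling division, used to count the 4-packed groups along a dimension (H4, C4). In Python terms, assuming the conventional macro definition:

    def up_div(a, b):
        # Number of b-sized groups needed to cover a elements.
        return (a + b - 1) // b

    assert up_div(10, 4) == 3  # 10 channels pack into three 4-wide vectors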
/third_party/rust/rust/src/tools/rustfmt/src/
types.rs
22 use crate::shape::Shape;
43 shape: Shape, in rewrite_path() argument
58 let fmt_ty = qself.ty.rewrite(context, shape)?; in rewrite_path()
68 let shape = shape.sub_width(3)?; in rewrite_path() localVariable
77 shape, in rewrite_path()
92 shape, in rewrite_path()
103 shape: Shape, in rewrite_path_segments() argument
109 let shape = shape.visual_indent(0); in rewrite_path_segments() localVariable
122 let extra_offset = extra_offset(&buffer, shape); in rewrite_path_segments()
123 let new_shape = shape.shrink_left(extra_offset)?; in rewrite_path_segments()
[all …]
shape.rs
143 pub(crate) struct Shape { struct
152 impl Shape { argument
168 pub(crate) fn legacy(width: usize, indent: Indent) -> Shape { in legacy() argument
169 Shape { in legacy()
176 pub(crate) fn indented(indent: Indent, config: &Config) -> Shape { in indented() argument
177 Shape { in indented()
184 pub(crate) fn with_max_width(&self, config: &Config) -> Shape { in with_max_width() argument
185 Shape { in with_max_width()
191 pub(crate) fn visual_indent(&self, extra_width: usize) -> Shape { in visual_indent() argument
193 Shape { in visual_indent()
[all …]
/third_party/mindspore/mindspore-src/source/tests/st/ops/cpu/
test_arithmetic_op.py
153 error0 = np.ones(shape=expect0.shape) * 1.0e-5
155 assert output0.shape == expect0.shape
160 error1 = np.ones(shape=expect1.shape) * 1.0e-5
162 assert output1.shape == expect1.shape
167 error2 = np.ones(shape=expect2.shape) * 1.0e-5
169 assert output2.shape == expect2.shape
174 error3 = np.ones(shape=expect3.shape) * 1.0e-5
176 assert output3.shape == expect3.shape
181 error4 = np.ones(shape=expect4.shape) * 1.0e-5
183 assert output4.shape == expect4.shape
[all …]
test_broadcast_to_op.py
31 shape = (4, 5, 2, 3, 4, 5, 6)
33 output = P.BroadcastTo(shape)(Tensor(x_np))
34 expect = np.broadcast_to(x_np, shape)
37 shape = (3, 5, 7, 4, 5, 6)
39 output = P.BroadcastTo(shape)(Tensor(x_np))
40 expect = np.broadcast_to(x_np, shape)
43 shape = (8, 5, 7, 4, 5, 6)
45 output = P.BroadcastTo(shape)(Tensor(x_np))
46 expect = np.broadcast_to(x_np, shape)
49 shape = (3, 4, 5, 2, 3, 4, 5, 7)
[all …]
test_standard_laplace.py
28 def __init__(self, shape, seed=0, seed2=0): argument
30 self.shape = shape
36 return self.stdlaplace(self.shape)
45 Description: input the shape and random seed, test the output value and shape
46 Expectation: the value and shape of output tensor match the predefined values
50 shape = (5, 6, 8)
51 net = NetStandardLaplace(shape, seed, seed2)
53 assert output.shape == (5, 6, 8)
58 shape = (5, 6, 8)
59 net = NetStandardLaplace(shape, seed, seed2)
[all …]
test_inplace_op.py
54 if v.shape[0] == 1:
68 @pytest.mark.parametrize('shape, indice_len', [((10, 4, 3, 2), 4), ((5, 2, 4, 6), 3)])
71 def test_inplace_add(shape, indice_len, dtype): argument
78 x = np.random.random(shape).astype(dtype)
79 v = np.random.random((indice_len,) + shape[1:]).astype(dtype)
80 indices = np.random.choice(list(range(shape[0])), indice_len, replace=False)
91 @pytest.mark.parametrize('shape', [(10, 4, 3, 2), (5, 2, 4, 6)])
93 def test_inplace_add_same_indice(shape, dtype): argument
102 x = np.random.random(shape).astype(dtype)
103 v = np.random.random((len(indices),) + shape[1:]).astype(dtype)
[all …]
/third_party/mindspore/mindspore-src/source/tests/st/ops/gpu/
test_realdiv_op.py
65 error0 = np.ones(shape=expect0.shape) * 1.0e-5
67 assert output0.shape == expect0.shape
72 error1 = np.ones(shape=expect1.shape) * 1.0e-5
74 assert output1.shape == expect1.shape
79 error2 = np.ones(shape=expect2.shape) * 1.0e-5
81 assert output2.shape == expect2.shape
86 error3 = np.ones(shape=expect3.shape) * 1.0e-5
88 assert output3.shape == expect3.shape
93 error4 = np.ones(shape=expect4.shape) * 1.0e-5
95 assert output4.shape == expect4.shape
[all …]
test_reduce_sum_op.py
183 error0 = np.ones(shape=expect0.shape) * 1.0e-5
185 assert output[0].shape == expect0.shape
189 error1 = np.ones(shape=expect1.shape) * 1.0e-5
191 assert output[1].shape == expect1.shape
195 error2 = np.ones(shape=expect2.shape) * 1.0e-5
197 assert output[2].shape == expect2.shape
201 error3 = np.ones(shape=expect3.shape) * 1.0e-5
203 assert output[3].shape == expect3.shape
207 error4 = np.ones(shape=expect4.shape) * 1.0e-5
209 assert output[4].shape == expect4.shape
[all …]
test_mul_op.py
64 error0 = np.ones(shape=expect0.shape) * 1.0e-5
66 assert output0.shape == expect0.shape
71 error1 = np.ones(shape=expect1.shape) * 1.0e-5
73 assert output1.shape == expect1.shape
78 error2 = np.ones(shape=expect2.shape) * 1.0e-5
80 assert output2.shape == expect2.shape
85 error3 = np.ones(shape=expect3.shape) * 1.0e-5
87 assert output3.shape == expect3.shape
92 error4 = np.ones(shape=expect4.shape) * 1.0e-5
94 assert output4.shape == expect4.shape
[all …]
/third_party/mindspore/mindspore-src/source/tests/st/ops/dynamic_shape/
test_binary_cross_entropy_dyn.py
16 """test BinaryCrossEntropy forward and backward dynamic shape"""
58 Description: test the ops in dynamic shape.
59 Expectation: expect correct shape result.
64 prediction_dyn = Tensor(shape=(None,), dtype=mstype.float32)
65 target_dyn = Tensor(shape=(None,), dtype=mstype.float32)
66 weight_dyn = Tensor(shape=(None,), dtype=mstype.float32)
72 assert loss.asnumpy().shape == prediction.shape
77 assert grad[0].asnumpy().shape == prediction.shape
78 assert grad[1].asnumpy().shape == target.shape
79 assert grad[2].asnumpy().shape == weight.shape
[all …]
test_nllloss_dyn.py
16 """test NLLLoss forward and backward dynamic shape"""
64 Description: test the ops in dynamic shape.
65 Expectation: expect correct output shape.
68 logits_dyn = Tensor(shape=[None]*len(logits.shape), dtype=logits.dtype)
69 target_dyn = Tensor(shape=[None]*len(target.shape), dtype=target.dtype)
70 weight_dyn = Tensor(shape=[None]*len(weight.shape), dtype=weight.dtype)
73 assert loss.asnumpy().shape == (logits.shape[0],)
74 assert total_weight.asnumpy().shape == tuple()
79 assert expect_grad[0].asnumpy().shape == logits.asnumpy().shape
80 assert expect_grad[1].asnumpy().shape == target.asnumpy().shape
[all …]
/third_party/mindspore/mindspore-src/source/mindspore/python/mindspore/common/
sparse_tensor.py
39 def __init__(self, indices=None, values=None, shape=None, row_tensor=None): argument
46 if not (indices is None and values is None and shape is None):
49 # Init a RowTensor from indices, values and shape
53 RowTensor_.__init__(self, indices, values, shape)
74 """Return RowTensor's shape."""
82 …When the `values` of a RowTensor has a shape of :math:`(d_0, d_1, ..., d_n)`, then this RowTensor …
83 …represent a subset of a larger dense tensor of shape :math:`(l_0, d_1, ..., d_n)`, where :math:`d_…
90 For example, if indices is [0], values is [[1, 2]], shape is
103 indices (Tensor): A 1-D integer Tensor of shape :math:`(d_0)` . Default: ``None``.
104 … values (Tensor): A Tensor of any dtype of shape :math:`(d_0, d_1, ..., d_n)` . Default: ``None``.
[all …]
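To make the quoted representation concrete, a small NumPy sketch that densifies a RowTensor triple; the dense shape (3, 2) is an assumed example, since the snippet truncates before giving it:

    import numpy as np

    def row_tensor_to_dense(indices, values, shape):
        # Scatter each row of `values` into the dense tensor at its row index,
        # per the RowTensor description quoted above.
        dense = np.zeros(shape, dtype=np.asarray(values).dtype)
        dense[np.asarray(indices)] = values
        return dense

    print(row_tensor_to_dense([0], [[1, 2]], (3, 2)))
    # [[1 2]
    #  [0 0]
    #  [0 0]]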
/third_party/rust/rust/tests/ui/suggestions/
suggest-variants.stderr
1 error[E0599]: no variant named `Squareee` found for enum `Shape`
4 LL | enum Shape {
7 LL | println!("My shape is {:?}", Shape::Squareee { size: 5});
10 error[E0599]: no variant named `Circl` found for enum `Shape`
13 LL | enum Shape {
16 LL | println!("My shape is {:?}", Shape::Circl { size: 5});
19 error[E0599]: no variant named `Rombus` found for enum `Shape`
22 LL | enum Shape {
25 LL | println!("My shape is {:?}", Shape::Rombus{ size: 5});
26 | ^^^^^^ variant not found in `Shape`
[all …]
/third_party/mindspore/mindspore-src/source/tests/st/sparse/
test_coo.py
36 Description: Test COOTensor(indices, values, shape) and COOTensor(COOTensor)
70 shape = x.shape
71 return COOTensor(indices, values, shape)
78 shape = x.shape
79 return COOTensor(indices, values, shape)
82 def __init__(self, shape): argument
86 self.shape = shape
89 x = COOTensor(indices, values, self.shape)
94 return x.indices, x.values, x.shape
100 shape = (3, 4)
[all …]
/third_party/mindspore/mindspore-src/source/mindspore/ccsrc/frontend/parallel/tensor_layout/
shape_util.h
26 * compute the accumulating product of all the values in shape from left to right,
29 …* given a shape = [d_n-1, d_n-2, ..., d_0](d_i > 0, i=0,1,...,n-1, elements of shape must be large…
33 * shape = [2, 8, 32]
37 Status ShapeToAccumulateProduct(const Shape &shape, Shape *shape_accum);
40 * compute the accumulating product of all the values in shape from right to left,
43 …* given a shape = [d_n-1, d_n-2, ..., d_0](d_i > 0, i=0,1,...,n-1, elements of shape must be large…
47 * shape = [2, 8, 32]
51 Status ShapeToAccumulateProductReverse(const Shape &shape, Shape *shape_accum);
54 * compute the original shape from the accumulating product shape_accum,
59 * then *shape = [accum_n-2/accum_n-1, accum_n-3/accum_n-2, ..., accum_0/accum_1]
[all …]
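The shape = [2, 8, 32] example in the header above works out as follows; a minimal Python rendering of what the three helpers compute, based on the header's own descriptions:

    from itertools import accumulate
    from operator import mul

    shape = [2, 8, 32]
    # ShapeToAccumulateProduct: left-to-right running product.
    accum = list(accumulate(shape, mul))                      # [2, 16, 512]
    # ShapeToAccumulateProductReverse: right-to-left running product.
    accum_rev = list(accumulate(reversed(shape), mul))[::-1]  # [512, 256, 32]
    # AccumulateProductToShape: one consistent way to invert the reverse
    # accumulation, dividing each entry by its right neighbour.
    recovered = [a // b for a, b in zip(accum_rev, accum_rev[1:] + [1])]
    assert recovered == shape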
shape_util.cc
25 * shape = [2, 8, 32]
28 Status ShapeToAccumulateProduct(const Shape &shape, Shape *shape_accum) { in ShapeToAccumulateProduct() argument
32 for (auto iter = shape.begin(); iter < shape.end(); ++iter) { in ShapeToAccumulateProduct()
35 MS_LOG(ERROR) << "element of shape should not be zero"; in ShapeToAccumulateProduct()
45 * shape = [2, 8, 32]
49 Status ShapeToAccumulateProductReverse(const Shape &shape, Shape *shape_accum) { in ShapeToAccumulateProductReverse() argument
53 for (auto iter = shape.end() - 1; iter >= shape.begin(); --iter) { in ShapeToAccumulateProductReverse()
56 MS_LOG(ERROR) << "element of shape should not be zero"; in ShapeToAccumulateProductReverse()
67 * shape = [2, 8, 32]
70 Status AccumulateProductToShape(const Shape &shape_accum, Shape *shape) { in AccumulateProductToShape() argument
[all …]
/third_party/mindspore/mindspore-src/source/mindspore/core/abstract/
dshape.h
42 /// \brief BaseShape defines the basic virtual class of NoShape and Shape classes.
90 /// \brief Broaden the shape.
93 /// \brief Get shape dimensions of BaseShape object.
95 /// \return Shape dimensions.
100 /// \brief Set shape dimensions of BaseShape object.
102 /// \param[in] shape Dimensions of shape.
103 virtual void SetShapeVector(const ShapeVector &shape) { in SetShapeVector() argument
107 /// \brief Build symbolic shape according to the digital shape.
110 /// \return Symbolic Shape.
116 /// \brief NoShape defines an invalid shape.
[all …]
