Searched refs: w_out (Results 1 – 8 of 8) sorted by relevance

/third_party/mindspore/tests/ut/cpp/dataset/
random_crop_and_resize_op_test.cc
40 int w_out = 2048; in TEST_F() local
46 TensorShape s_out({h_out, w_out, s_in[2]}); in TEST_F()
48 …auto op = std::make_unique<RandomCropAndResizeOp>(h_out, w_out, scale_lb, scale_ub, aspect_lb, asp… in TEST_F()
66 int w_out = 2048; in TEST_F() local
72 TensorShape s_out({h_out, w_out, s_in[2]}); in TEST_F()
74 …auto op = std::make_unique<RandomCropAndResizeOp>(h_out, w_out, scale_lb, scale_ub, aspect_lb, asp… in TEST_F()
92 int w_out = 2048; in TEST_F() local
98 TensorShape s_out({h_out, w_out, s_in[2]}); in TEST_F()
100 …auto op = std::make_unique<RandomCropAndResizeOp>(h_out, w_out, scale_lb, scale_ub, aspect_lb, asp… in TEST_F()
random_crop_and_resize_with_bbox_op_test.cc
44 int w_out = 2048; in TEST_F() local
49 …auto op = std::make_unique<RandomCropAndResizeWithBBoxOp>(h_out, w_out, scale_lb, scale_ub, aspect… in TEST_F()
74 int w_out = 2048; in TEST_F() local
79 …auto op = std::make_unique<RandomCropAndResizeWithBBoxOp>(h_out, w_out, scale_lb, scale_ub, aspect… in TEST_F()
92 int w_out = 2048; in TEST_F() local
97 …auto op = std::make_unique<RandomCropAndResizeWithBBoxOp>(h_out, w_out, scale_lb, scale_ub, aspect… in TEST_F()
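The test cases above all follow the same pattern: pick target dimensions h_out and w_out, construct the op, and check the resulting shape. A minimal C++ sketch of the shape relationship the first file asserts, assuming an HWC input shape as in the tests (the helper name is hypothetical):

    #include <cstdint>
    #include <vector>

    // Expected output shape after a crop-and-resize to (h_out, w_out): the spatial
    // dimensions are replaced and the channel dimension s_in[2] is preserved,
    // mirroring TensorShape s_out({h_out, w_out, s_in[2]}) in the tests above.
    std::vector<int64_t> ExpectedCropResizeShape(const std::vector<int64_t> &s_in,
                                                 int64_t h_out, int64_t w_out) {
      return {h_out, w_out, s_in[2]};
    }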
/third_party/mindspore/mindspore/ops/operations/
_thor_ops.py
502 w_out = math.ceil((x_shape[3] - dilation_w * (kernel_size_w - 1)) / stride_w)
506 w_out = math.ceil(x_shape[3] / stride_w)
510 … pad_needed_w = max(0, (w_out - 1) * stride_w + dilation_w * (kernel_size_w - 1) + 1 - x_shape[3])
516 w_out = 1 + (x_shape[3] + 2 * self.pad - kernel_size_w - (kernel_size_w - 1) * (dilation_w - 1)) /…
518 w_out = math.floor(w_out)
525 out_shape = [channel, k_h, k_w, batch_size, h_out, w_out]
582 w_out = math.ceil((x_shape[3] - dilation_w * (kernel_size_w - 1)) / stride_w)
586 w_out = math.ceil(x_shape[3] / stride_w)
590 … pad_needed_w = max(0, (w_out - 1) * stride_w + dilation_w * (kernel_size_w - 1) + 1 - x_shape[3])
599 out_shape = [batch_size, h_out, w_out, channel * k_h * k_w]
[all …]
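Reading the branches in _thor_ops.py above, the first ceil expression computes the output width when the dilated kernel must fit entirely inside the input, the second follows the stride alone and then back-computes the padding that makes it reachable. A C++ restatement of that arithmetic as a sketch (function names are mine; the branch-to-pad-mode mapping is my reading of the code):

    #include <algorithm>
    #include <cmath>

    // valid: count only positions where the dilated kernel fits entirely inside the input
    double w_out_valid(double w, double kernel_w, double stride_w, double dilation_w) {
      return std::ceil((w - dilation_w * (kernel_w - 1)) / stride_w);
    }

    // same: output width follows the stride alone; padding is derived afterwards
    double w_out_same(double w, double stride_w) {
      return std::ceil(w / stride_w);
    }

    // padding needed along width so that the 'same' output width is actually reachable
    double pad_needed_w(double w, double w_out, double kernel_w, double stride_w, double dilation_w) {
      return std::max(0.0, (w_out - 1) * stride_w + dilation_w * (kernel_w - 1) + 1 - w);
    }

For example, w = 224, kernel_w = 3, stride_w = 2, dilation_w = 1 gives 111 for the first expression and 112 for the second, with 1 pixel of padding needed.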
nn_ops.py
1575 w_out = math.ceil((x_shape[3] - dilation_w * (kernel_size_w - 1)) / stride_w)
1579 w_out = math.ceil(x_shape[3] / stride_w)
1585 … pad_needed_w = max(0, (w_out - 1) * stride_w + dilation_w * (kernel_size_w - 1) + 1 - x_shape[3])
1593 w_out = 1 + (x_shape[3] + pad_left + pad_right - kernel_size_w - (kernel_size_w - 1) * (dilation_w…
1596 w_out = math.floor(w_out)
1602 out_shape = [x_shape[0], out_channel, h_out, w_out]
8380 w_out = math.ceil((x_shape[4] - dilation_w * (kernel_size_w - 1)) / stride_w)
8386 w_out = math.ceil(x_shape[4] / stride_w)
8396 … pad_needed_w = max(0, (w_out - 1) * stride_w + dilation_w * (kernel_size_w - 1) + 1 - x_shape[4])
8406 w_out = 1 + (x_shape[4] + pad_left + pad_right - kernel_size_w - (kernel_size_w - 1)
[all …]
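The convolution shape checks in nn_ops.py repeat the same width arithmetic and add an explicit-padding branch where the left and right pads are given separately (the x_shape[4] indexing around line 8380 is the corresponding width axis of a 3-D layout). A sketch of that branch with a worked number (the function name is mine):

    #include <cmath>

    // Explicit-pad output width with separate left/right padding,
    // following the expression shown at nn_ops.py:1593 above.
    double w_out_pad(double w, double pad_left, double pad_right,
                     double kernel_w, double stride_w, double dilation_w) {
      double out = 1 + (w + pad_left + pad_right - kernel_w
                        - (kernel_w - 1) * (dilation_w - 1)) / stride_w;
      return std::floor(out);
    }

    // Example: w = 224, pad_left = pad_right = 1, kernel_w = 3, stride_w = 2,
    // dilation_w = 1  ->  floor(1 + 223 / 2) = 112.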
/third_party/mindspore/mindspore/nn/layer/
conv.py
1073 w_out = _deconv_output_length(self.is_valid, self.is_same, self.is_pad, w, self.kernel_size[1],
1076 … return self.bias_add(self.conv2d_transpose(x, self.weight, (n, self.out_channels, h_out, w_out)),
1078 return self.conv2d_transpose(x, self.weight, (n, self.out_channels, h_out, w_out))
1270 w_out = _deconv_output_length(self.is_valid, self.is_same, self.is_pad, w, self.kernel_size[1],
1272 output = self.conv2d_transpose(x, self.weight, (n, self.out_channels, h_out, w_out))
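The transposed-convolution layers in conv.py compute the target spatial size with the _deconv_output_length helper, whose body is not shown in these results. For orientation, a sketch of the textbook transposed-convolution output length for the explicit-padding case (an assumption for illustration, not a copy of the MindSpore helper, which also handles the valid/same modes selected by the flags passed above):

    // Standard transposed-convolution output length with explicit padding.
    int deconv_out_len(int in_len, int kernel, int stride, int dilation,
                       int pad_before, int pad_after) {
      return (in_len - 1) * stride - (pad_before + pad_after)
             + dilation * (kernel - 1) + 1;
    }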
/third_party/ffmpeg/libavutil/
opt.h
761 int av_opt_get_image_size(void *obj, const char *name, int search_flags, int *w_out, int *h_out);
opt.c
952 int av_opt_get_image_size(void *obj, const char *name, int search_flags, int *w_out, int *h_out) in av_opt_get_image_size() argument
965 if (w_out) *w_out = *(int *)dst; in av_opt_get_image_size()
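av_opt_get_image_size reads an image-size option into the w_out/h_out pointers; the check at opt.c:965 shows that w_out may be NULL, and h_out is presumably treated the same way. A hedged C++ usage sketch, assuming obj is an AVOptions-enabled struct that exposes an AV_OPT_TYPE_IMAGE_SIZE option named "video_size" (the option name is an assumption for illustration):

    extern "C" {
    #include <libavutil/opt.h>
    }

    // Returns true and fills w/h on success; a negative AVERROR return means the
    // option was not found or is not an image-size option.
    bool query_video_size(void *obj, int *w, int *h) {
      return av_opt_get_image_size(obj, "video_size", 0, w, h) >= 0;
    }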
/third_party/mindspore/mindspore/core/abstract/
prim_nn.cc
100 int64_t w_out = ((w_input + 2 * padding - (window - 1) - 1) / stride) + 1; in InferImplPooling() local
101 ShapeVector shape_out = {input_shape->shape()[0], input_shape->shape()[1], h_out, w_out}; in InferImplPooling()
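InferImplPooling derives the pooled width with plain integer arithmetic. A small C++ sketch of that expression with a worked example (the function name is mine):

    #include <cstdint>

    // Pooled output width, mirroring the expression at prim_nn.cc:100 above
    // (integer division, so partial windows are dropped).
    int64_t pooling_w_out(int64_t w_input, int64_t padding, int64_t window, int64_t stride) {
      return ((w_input + 2 * padding - (window - 1) - 1) / stride) + 1;
    }

    // Example: w_input = 32, padding = 0, window = 2, stride = 2
    //   -> ((32 - 1 - 1) / 2) + 1 = 16.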