/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "common/common.h"
#include "minddata/dataset/kernels/data/pad_end_op.h"
#include "utils/log_adapter.h"

using namespace mindspore::dataset;
using mindspore::LogStream;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::MsLogLevel::INFO;

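// Unit tests for PadEndOp (minddata/dataset/kernels/data/pad_end_op.h). As exercised
// below, the op pads a tensor out to a target shape, appending a caller-supplied pad
// value at the end of each dimension.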
class MindDataTestPadEndOp : public UT::Common {
 protected:
  MindDataTestPadEndOp() {}
};

TEST_F(MindDataTestPadEndOp, TestOp) {
  MS_LOG(INFO) << "Doing MindDataTestPadEndOp.";

  // first set of test cases: numeric values

  TensorShape pad_data_shape({});

  // prepare input tensor
  std::vector<float> orig1 = {1, 1, 1, 1};
  TensorShape input_shape1({2, 2});
  std::vector<TensorShape> input_shape1_vector = {input_shape1};
  std::shared_ptr<Tensor> input1;
  Tensor::CreateFromVector(orig1, input_shape1, &input1);

  // pad_shape
  TensorShape pad_shape1[3] = {TensorShape({3, 3}), TensorShape({2, 4}), TensorShape({4, 2})};

  // value to pad
  std::vector<std::vector<float>> pad_data1 = {{0}, {3.5}, {3.5}};
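  // each entry of pad_data1 is the scalar fill value paired with the matching entry of
  // pad_shape1: pad to {3, 3} with 0, to {2, 4} with 3.5, and to {4, 2} with 3.5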

  std::shared_ptr<Tensor> expected1[3];

  // expected tensor output for test case 1
  std::vector<float> out1 = {1, 1, 0, 1, 1, 0, 0, 0, 0};
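  // i.e. the 2x2 input padded at the end of each dimension to {3, 3} with value 0:
  //   1 1 0
  //   1 1 0
  //   0 0 0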
  Tensor::CreateFromVector(out1, pad_shape1[0], &(expected1[0]));

  // expected tensor output for test case 2
  std::vector<float> out2 = {1, 1, 3.5, 3.5, 1, 1, 3.5, 3.5};
  Tensor::CreateFromVector(out2, pad_shape1[1], &(expected1[1]));

  // expected tensor output for test case 3
  std::vector<float> out3 = {1, 1, 1, 1, 3.5, 3.5, 3.5, 3.5};
  Tensor::CreateFromVector(out3, pad_shape1[2], &(expected1[2]));

  // run the PadEndOp
  for (auto i = 0; i < 3; i++) {
    std::shared_ptr<Tensor> output;
    std::vector<TensorShape> output_shape = {TensorShape({})};

    std::shared_ptr<Tensor> pad_value1;
    Tensor::CreateFromVector(pad_data1[i], pad_data_shape, &pad_value1);

    std::unique_ptr<PadEndOp> op(new PadEndOp(pad_shape1[i], pad_value1));
    Status s = op->Compute(input1, &output);

    EXPECT_TRUE(s.IsOk());
    ASSERT_TRUE(output->shape() == expected1[i]->shape());
    ASSERT_TRUE(output->type() == expected1[i]->type());
    MS_LOG(DEBUG) << *output << std::endl;
    MS_LOG(DEBUG) << *expected1[i] << std::endl;
    ASSERT_TRUE(*output == *expected1[i]);

    s = op->OutputShape(input_shape1_vector, output_shape);
    EXPECT_TRUE(s.IsOk());
    ASSERT_TRUE(output_shape.size() == 1);
    ASSERT_TRUE(output->shape() == output_shape[0]);
  }

  // second set of test cases: strings

  // input tensor
  std::vector<std::string> orig2 = {"this", "is"};
  TensorShape input_shape2({2});
  std::vector<TensorShape> input_shape2_vector = {input_shape2};
  std::shared_ptr<Tensor> input2;
  Tensor::CreateFromVector(orig2, input_shape2, &input2);

  // pad_shape
  TensorShape pad_shape2[3] = {TensorShape({5}), TensorShape({2}), TensorShape({10})};

  // pad value
  std::vector<std::string> pad_data2[3] = {{""}, {"P"}, {" "}};
  std::shared_ptr<Tensor> pad_value2[3];

  // expected output for the 3 test cases
  std::shared_ptr<Tensor> expected2[3];
  std::vector<std::string> outstring[3] = {
    {"this", "is", "", "", ""}, {"this", "is"}, {"this", "is", " ", " ", " ", " ", " ", " ", " ", " "}};
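  // (for test case 2 the pad shape {2} matches the input shape, so the expected output
  //  equals the input and the pad value "P" never appears in the output)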

  for (auto i = 0; i < 3; i++) {
    // pad value
    Tensor::CreateFromVector(pad_data2[i], pad_data_shape, &pad_value2[i]);

    std::shared_ptr<Tensor> output;
    std::vector<TensorShape> output_shape = {TensorShape({})};

    std::unique_ptr<PadEndOp> op(new PadEndOp(pad_shape2[i], pad_value2[i]));

    Status s = op->Compute(input2, &output);

    Tensor::CreateFromVector(outstring[i], pad_shape2[i], &expected2[i]);

    EXPECT_TRUE(s.IsOk());
    ASSERT_TRUE(output->shape() == expected2[i]->shape());
    ASSERT_TRUE(output->type() == expected2[i]->type());
    MS_LOG(DEBUG) << *output << std::endl;
    MS_LOG(DEBUG) << *expected2[i] << std::endl;
    ASSERT_TRUE(*output == *expected2[i]);

    s = op->OutputShape(input_shape2_vector, output_shape);
    EXPECT_TRUE(s.IsOk());
    ASSERT_TRUE(output_shape.size() == 1);
    ASSERT_TRUE(output->shape() == output_shape[0]);
  }

  MS_LOG(INFO) << "MindDataTestPadEndOp end.";
}