/**
 * Copyright 2019 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <string>
#include <list>
#include <vector>
#include <memory>
#include <unordered_map>
#include "common/common_test.h"
#include "frontend/parallel/strategy.h"
#include "frontend/parallel/ops_info/activation_info.h"
#include "frontend/parallel/device_manager.h"
#include "frontend/parallel/step_parallel.h"

namespace mindspore {
namespace parallel {

class LogSoftmaxInfo;
using LogSoftmaxInfoPtr = std::shared_ptr<LogSoftmaxInfo>;
LogSoftmaxInfoPtr log_softmax;

class TestLogSoftmaxInfo : public UT::Common {
 public:
  TestLogSoftmaxInfo() {}
  void SetUp();
  void TearDown() {}
};

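// SetUp builds a 130-device topology split into two stages (128 devices in stage 0 and 2 in
// stage 1), then constructs a LogSoftmaxInfo with softmax axis -2 and a {2, 4, 8, 16} input.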
void TestLogSoftmaxInfo::SetUp() {
  RankList dev_list;

  for (int32_t i = 0; i < 130; i++) {
    dev_list.push_back(i);
  }

  RankList stage_map;
  stage_map.push_back(128);
  stage_map.push_back(2);

  int32_t local_dev = 0;

  // create a new g_device_manager
  g_device_manager = std::make_shared<DeviceManager>();
  g_device_manager->Init(dev_list, local_dev, stage_map, "hccl");

  ValuePtr axis = MakeValue(static_cast<int64_t>(-2));
  std::unordered_map<std::string, ValuePtr> attr = {{"axis", axis}};

  Shapes inputs_shape = {{2, 4, 8, 16}};
  Shapes outputs_shape = {{2, 4, 8, 16}};

  log_softmax = std::make_shared<LogSoftmaxInfo>("log_softmax_info", inputs_shape, outputs_shape, attr);
}

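// The {2, 4, 1, 16} strategy used below splits the input across 2 * 4 * 1 * 16 = 128 devices,
// exactly the size of stage 0, and leaves the softmax axis (-2) unsplit; the inferred device
// matrix is therefore expected to mirror the strategy.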
TEST_F(TestLogSoftmaxInfo, InferDevMatrixShape1) {
  Strategys inputs = {{2, 4, 1, 16}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  log_softmax->Init(strategy);
  Shape dev_matrix_shape = log_softmax->dev_matrix_shape();

  Shape expect = {2, 4, 1, 16};
  ASSERT_EQ(dev_matrix_shape, expect);
}

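// Each slice shape is the full tensor shape divided element-wise by the strategy:
// {2, 4, 8, 16} / {2, 4, 1, 16} = {1, 1, 8, 1} for both the input and the output.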
TEST_F(TestLogSoftmaxInfo, InferSliceShape1) {
  Strategys str = {{2, 4, 1, 16}};
  StrategyPtr strategy = NewStrategy(0, str);

  log_softmax->Init(strategy);
  std::vector<TensorInfo> inputs = log_softmax->inputs_tensor_info();
  std::vector<TensorInfo> outputs = log_softmax->outputs_tensor_info();

  Shape input_slice_shape_expect = {1, 1, 8, 1};
  Shape output_slice_shape_expect = {1, 1, 8, 1};

  TensorInfo input_tensor_info = inputs.at(0);
  TensorInfo output_tensor_info = outputs.at(0);

  Shape input_slice_shape = input_tensor_info.slice_shape();
  Shape output_slice_shape = output_tensor_info.slice_shape();

  ASSERT_EQ(input_slice_shape, input_slice_shape_expect);
  ASSERT_EQ(output_slice_shape, output_slice_shape_expect);
}

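// The expected tensor map {3, 2, 1, 0} ties each tensor dimension to its corresponding
// device-matrix dimension (device-matrix dimensions are indexed from the highest value on
// the left down to 0 on the right), i.e. the layout follows the strategy one-to-one here.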
TEST_F(TestLogSoftmaxInfo, GetTensorLayout1) {
  Strategys str = {{2, 4, 1, 16}};
  StrategyPtr strategy = NewStrategy(0, str);

  log_softmax->Init(strategy);
  std::vector<TensorInfo> inputs = log_softmax->inputs_tensor_info();
  std::vector<TensorInfo> outputs = log_softmax->outputs_tensor_info();

  TensorMap input_expect = {3, 2, 1, 0};
  TensorMap output_expect = {3, 2, 1, 0};

  TensorInfo input_tensor_info = inputs.at(0);
  TensorInfo output_tensor_info = outputs.at(0);

  Map input_tensor_map = input_tensor_info.tensor_layout().origin_tensor_map();
  Map output_tensor_map = output_tensor_info.tensor_layout().origin_tensor_map();

  ASSERT_EQ(input_tensor_map.array(), input_expect);
  ASSERT_EQ(output_tensor_map.array(), output_expect);
}

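// The next two tests expect no extra operators: with the softmax axis unsplit, no correction
// op is needed after the local computation, and no mirror operators are expected since the
// input is not replicated across any device dimension under this strategy.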
TEST_F(TestLogSoftmaxInfo, GetForwardOp1) {
  Strategys inputs = {{2, 4, 1, 16}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  log_softmax->Init(strategy);
  OperatorVector forward_op = log_softmax->forward_op();
  size_t size = forward_op.size();

  ASSERT_EQ(size, 0);
}

TEST_F(TestLogSoftmaxInfo, GetMirrorOPs1) {
  Strategys inputs = {{2, 4, 1, 16}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  log_softmax->Init(strategy);
  MirrorOps mirror_ops = log_softmax->mirror_ops();

  size_t size = mirror_ops.size();

  ASSERT_EQ(size, 0);
}

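// The following strategies are all expected to be rejected: CheckStrategy1 supplies two
// strategies for a single-input op, CheckStrategy2 gives only three dimensions for a 4-D
// input, and CheckStrategy3 splits the softmax axis (its 1024-way split also exceeds the
// 128 devices of stage 0).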
TEST_F(TestLogSoftmaxInfo, CheckStrategy1) {
  // Success: {{2,4,1,16}}
  Strategys inputs = {{2, 2, 8, 16}, {2, 4, 16, 1}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  Status ret = log_softmax->Init(strategy);
  ASSERT_EQ(ret, FAILED);
}

TEST_F(TestLogSoftmaxInfo, CheckStrategy2) {
  // Success: {{2,4,1,16}}
  Strategys inputs = {{2, 4, 8}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  Status ret = log_softmax->Init(strategy);
  ASSERT_EQ(ret, FAILED);
}

TEST_F(TestLogSoftmaxInfo, CheckStrategy3) {
  // Success: {{2,4,1,16}}
  Strategys inputs = {{2, 4, 8, 16}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  Status ret = log_softmax->Init(strategy);
  ASSERT_EQ(ret, FAILED);
}

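// stage_device_list() should report the devices of the current stage; stage 0 was configured
// with 128 devices in SetUp.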
TEST_F(TestLogSoftmaxInfo, GetDeviceList1) {
  Strategys inputs = {{2, 4, 1, 16}};
  StrategyPtr strategy = NewStrategy(0, inputs);

  log_softmax->Init(strategy);
  RankList dev_list = log_softmax->stage_device_list();
  ASSERT_EQ(dev_list.size(), 128);
}

}  // namespace parallel
}  // namespace mindspore