# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

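"""Tests for Conv2D on the GPU backend: the raw P.Conv2D primitive, the
nn.Conv2d cell, dynamic input shapes, and the NHWC data format."""
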
import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.ops.operations import _inner_ops as inner
from mindspore.common.parameter import Parameter
from mindspore.common.initializer import initializer


class NetConv2d(nn.Cell):
    """Wraps the raw P.Conv2D primitive: 1x1 kernel, two output channels."""

    def __init__(self):
        super(NetConv2d, self).__init__()
        out_channel = 2
        kernel_size = 1
        self.conv = P.Conv2D(out_channel,
                             kernel_size,
                             mode=1,
                             pad_mode="valid",
                             pad=0,
                             stride=1,
                             dilation=1,
                             group=1)

    def construct(self, x, w):
        return self.conv(x, w)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_conv2d():
    x = Tensor(np.arange(1 * 3 * 3 * 3).reshape(1, 3, 3, 3).astype(np.float32))
    w = Tensor(np.arange(2 * 3 * 1 * 1).reshape(2, 3, 1, 1).astype(np.float32))
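    # A 1x1 convolution is a per-pixel dot product over the input channels:
    # out[n, oc, i, j] = sum_c x[n, c, i, j] * w[oc, c].
    # E.g. out[0, 0, 0, 0] = 0*0 + 9*1 + 18*2 = 45, the first entry below.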
    expect = np.array([[[[45, 48, 51],
                         [54, 57, 60],
                         [63, 66, 69]],
                        [[126, 138, 150],
                         [162, 174, 186],
                         [198, 210, 222]]]]).astype(np.float32)

    # Check the result in PyNative mode first, then again in graph mode.
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU", max_device_memory="0.2GB")
    conv2d = NetConv2d()
    output = conv2d(x, w)
    assert (output.asnumpy() == expect).all()
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    conv2d = NetConv2d()
    output = conv2d(x, w)
    assert (output.asnumpy() == expect).all()


class NetConv(nn.Cell):
    """nn.Conv2d cell with a 5x3 kernel, stride 2 and 'same' padding over a fixed input."""

    def __init__(self, weight, x):
        super(NetConv, self).__init__()
        self.conv = nn.Conv2d(in_channels=3,
                              out_channels=3,
                              kernel_size=(5, 3),
                              stride=2,
                              pad_mode='same',
                              padding=(0, 0, 0, 0),
                              dilation=(1, 1),
                              group=1,
                              has_bias=False,
                              weight_init=Tensor(weight)
                              )
        self.x = Parameter(initializer(Tensor(x), [1, 3, 4, 2]), name="x")

    def construct(self):
        return self.conv(self.x)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_conv():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    weight = np.array([[[[0.38968208, 0.14398979, 0.7962463],
                         [-2.1836321, -0.63823014, -0.50588065],
                         [0.6660469, 0.64673275, -0.13160042],
                         [1.3683757, 1.4005762, -0.37235805],
                         [-0.22638111, 0.45427424, -0.10293389]],
                        [[1.4985064, -0.29318333, -0.92694616],
                         [1.539068, 0.8937254, -1.2598171],
                         [0.9658142, -0.63945454, -0.23185322],
                         [1.363089, -0.41694695, -2.2750475],
                         [-0.4865508, -1.6938025, 0.609849]],
                        [[1.1844803, 0.99874926, -1.9475793],
                         [0.4987858, 0.5307887, -0.04226681],
                         [0.4529779, -1.1960793, 0.9456575],
                         [3.133675, 0.2309789, -0.29201075],
                         [-0.59632736, -0.0789804, -0.69486314]]],

                       [[[-0.5606142, 0.6420862, 0.2478745],
                         [0.02717604, 1.5483379, -0.9373383],
                         [-1.1017276, -0.259478, 1.0311872],
                         [1.8387799, 0.16468556, 0.33392152],
                         [-1.8781787, 1.0158662, 1.6527579]],

                        [[0.45696944, -0.5652523, -1.5618048],
                         [-0.30304828, 0.1331878, -0.36955845],
                         [0.91655576, 0.66612357, 0.3068175],
                         [-0.45732066, 0.8923335, 1.0542952],
                         [-0.73519516, 1.0518405, -1.0273266]],

                        [[-0.79712886, -0.26814285, 0.12779616],
                         [1.0367643, -1.6180774, 0.42999932],
                         [-0.81818223, -0.81502074, 0.882194],
                         [0.53640485, 0.4178927, 1.6037121],
                         [0.9256354, -1.1006796, 0.16614541]]],

                       [[[-1.5216796, -1.2473261, 0.6549515],
                         [0.63627815, 0.7221449, 0.02977821],
                         [-0.61331123, -0.49451825, 0.33852202],
                         [1.4510741, -1.3818305, -0.791747],
                         [0.6989747, 0.49558765, 1.0813237]],

                        [[-0.03969796, 0.71586496, 0.8326594],
                         [-0.15443641, 1.0389746, -0.59301984],
                         [0.7197836, 0.03257621, 1.8398637],
                         [0.6111736, -0.16166899, -2.4869773],
                         [1.3066711, -1.8003578, 0.17412892]],

                        [[-0.31470737, -0.5938182, -1.1311078],
                         [-0.99081016, 0.4005125, 0.44154453],
                         [1.0876914, -2.5958562, -0.5914863],
                         [1.3759689, -0.7741513, 0.19928917],
                         [1.6792973, 2.2744863, -0.04308867]]]]).astype(np.float32)
    x = np.array([[[[-1.4311737, 1.015344],
                    [0.04431088, -2.2886624],
                    [1.4832113, 1.240908],
                    [0.67040104, 0.15266363]],

                   [[0.44226435, 1.1461105],
                    [1.194218, 1.5547837],
                    [0.23152256, 1.5911953],
                    [0.11206784, 0.17978816]],

                   [[-0.57803905, 0.8039611],
                    [0.0823025, -0.6134477],
                    [-1.4171146, 1.6269946],
                    [0.48878875, 0.9117505]]]]).astype(np.float32)
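    # pad_mode='same' with stride 2 gives H_out = ceil(4 / 2) = 2 and
    # W_out = ceil(2 / 2) = 1, so the expected output has shape (1, 3, 2, 1).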
    conv2d = NetConv(weight, x)
    output = conv2d()
    expected = np.array([[[[2.3498724],
                           [-1.9199573]],
                          [[5.376562],
                           [-5.425745]],
                          [[5.9105043],
                           [7.469034]]]]).astype(np.float32)
    # Float32 result: compare against the reference with an elementwise 1e-4 tolerance.
    loss = np.abs(expected - output.asnumpy())
    error = 1e-4 * np.ones(loss.shape)
    assert (loss < error).all()


class NetConv2dDynamic(nn.Cell):
    """NetConv2d variant whose inputs are converted to dynamic shapes first."""

    def __init__(self):
        super(NetConv2dDynamic, self).__init__()
        self.dynshape = inner.GpuConvertToDynamicShape()
        out_channel = 2
        kernel_size = 1
        self.conv = P.Conv2D(out_channel,
                             kernel_size,
                             mode=1,
                             pad_mode="valid",
                             pad=0,
                             stride=1,
                             dilation=1,
                             group=1)

    def construct(self, x, w):
        # Mark both inputs as dynamically shaped before the convolution.
        x_dyn = self.dynshape(x)
        w_dyn = self.dynshape(w)
        x_conv = self.conv(x_dyn, w_dyn)
        return x_conv


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_conv2d_dynamic():
    x1 = Tensor(np.arange(1 * 3 * 3 * 3).reshape(1, 3, 3, 3).astype(np.float32))
    w1 = Tensor(np.arange(2 * 3 * 1 * 1).reshape(2, 3, 1, 1).astype(np.float32))
    expect1 = np.array([[[[45, 48, 51],
                          [54, 57, 60],
                          [63, 66, 69]],
                         [[126, 138, 150],
                          [162, 174, 186],
                          [198, 210, 222]]]]).astype(np.float32)

    x2 = Tensor(np.arange(5 * 1 * 2 * 2).reshape(5, 1, 2, 2).astype(np.float32))
    w2 = Tensor(np.arange(2 * 1 * 1 * 1).reshape(2, 1, 1, 1).astype(np.float32))
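    # w2 holds the 1x1 weights [0, 1]: the first output channel is x2 * 0
    # (all zeros) and the second is x2 * 1 (a copy of the input).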
    expect2 = np.array([[[[0., 0.],
                          [0., 0.]],
                         [[0., 1.],
                          [2., 3.]]],
                        [[[0., 0.],
                          [0., 0.]],
                         [[4., 5.],
                          [6., 7.]]],
                        [[[0., 0.],
                          [0., 0.]],
                         [[8., 9.],
                          [10., 11.]]],
                        [[[0., 0.],
                          [0., 0.]],
                         [[12., 13.],
                          [14., 15.]]],
                        [[[0., 0.],
                          [0., 0.]],
                         [[16., 17.],
                          [18., 19.]]]]).astype(np.float32)

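    # Both calls reuse the same network instance, so the second one exercises
    # the dynamic-shape path with a different batch size and channel count.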
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    conv2d = NetConv2dDynamic()
    output1 = conv2d(x1, w1)
    assert (output1.asnumpy() == expect1).all()
    output2 = conv2d(x2, w2)
    assert (output2.asnumpy() == expect2).all()


class NetConvNHWC(nn.Cell):
    """nn.Conv2d cell operating on input stored in NHWC data format."""

    def __init__(self, weight, x):
        super(NetConvNHWC, self).__init__()
        self.conv = nn.Conv2d(in_channels=1,
                              out_channels=3,
                              kernel_size=2,
                              stride=2,
                              pad_mode="valid",
                              weight_init=Tensor(weight),
                              data_format='NHWC'
                              )
        self.x = Parameter(initializer(Tensor(x), [1, 4, 4, 1]), name="x")

    def construct(self):
        return self.conv(self.x)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_conv_NHWC():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x1 = Tensor(np.arange(1 * 4 * 4 * 1).reshape(1, 4, 4, 1).astype(np.float32))
    w1 = Tensor(np.arange(3 * 2 * 2 * 1).reshape(3, 2, 2, 1).astype(np.float32))
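    # Input is (N, H, W, C) = (1, 4, 4, 1); a valid 2x2 convolution with
    # stride 2 halves each spatial dimension, so the output is (1, 2, 2, 3).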
    expected = np.array([[[[24., 64., 104.],
                           [36., 108., 180.]],
                          [[72., 240., 408.],
                           [84., 284., 484.]]]]).astype(np.float32)
    conv2d = NetConvNHWC(w1, x1)
    output = conv2d()
    assert (output.asnumpy() == expected).all()