# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
15"""
16@File  : test_parse.py
17@Author:
18@Date  : 2019-01-23 17:13
19@Desc  :
20"""
import logging
import pytest
import numpy as np

import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import context
from mindspore.ops import composite as C
from mindspore.ops import operations as P
from mindspore.common.api import ms_function, _cell_graph_executor
from mindspore.ops._grad.grad_base import bprop_getters
from mindspore.ops.primitive import prim_attr_register, PrimitiveWithInfer
from mindspore.ops.functional import tensor_add
from ...ut_filter import non_graph_engine

# pylint: disable=W0613,W0612
# W0613: unused-argument
# W0612: unused-variable

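# check_bprop=True asks the runtime to validate that a registered bprop returns
# one gradient per forward input, with matching dtype and shape; the
# test_bprop_with_wrong_output_* cases below rely on this check to raise.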
@pytest.fixture(name='enable_check_bprop')
def fixture_enable_check_bprop():
    context.set_context(check_bprop=True)
    yield
    context.set_context(check_bprop=False)


grad_all = C.GradOperation(get_all=True)


log = logging.getLogger("test")
log.setLevel(level=logging.ERROR)
context.set_context(mode=context.GRAPH_MODE)

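
# Illustrative sketch only (not part of the original suite): grad_all wraps a
# cell and returns one gradient per input, which is how the bprop and assign
# tests further below consume it. The names here are hypothetical helpers.
class _AddExample(nn.Cell):
    """Hypothetical two-input cell used only to illustrate grad_all."""

    def construct(self, a, b):
        return a + b


def _grad_all_example():
    """Never collected by pytest (no test_ prefix); shows grad_all usage."""
    a = Tensor(np.array(1.0).astype(np.float32))
    b = Tensor(np.array(2.0).astype(np.float32))
    # For a plain addition, both returned gradients are 1.0.
    return grad_all(_AddExample())(a, b)

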
# Test case: parse a Cell whose construct has a default parameter value
class Net(nn.Cell):
    """ Net definition """

    def __init__(self, dim):
        super(Net, self).__init__()
        self.softmax1 = nn.Softmax(dim)
        self.softmax2 = nn.Softmax(dim + 1)

    def construct(self, input_data, input1=1+2+3+4):
        return self.softmax1(input_data)


@non_graph_engine
def test_parse_default_parameter_case2():
    """ test_parse_default_parameter_case2 """
    log.debug("begin test_parse_default_parameter_case2")
    net = Net(0)
    npd = np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32')
    log.debug("input value is: %r", npd)
    input_data = ms.Tensor(npd)
    input_data.set_dtype(ms.float32)

    log.debug("start run")
    output = net(input_data)

    value = output.asnumpy()
    log.debug("output value = %r", value)


# Test case: parse an object whose construct takes variadic (*args) parameters
class Net1(nn.Cell):
    """ Net1 definition """

    def __init__(self):
        super(Net1, self).__init__()

    def construct(self, *args):
        x = args[0]
        return x


def test_var_parameter_case2():
    """ test_var_parameter_case2 """
    log.debug("begin test_var_parameter_case2")
    net = Net1()
    npd = np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32')
    log.debug("input value is: %r", npd)
    input_data = ms.Tensor(npd)
    input_data.set_dtype(ms.float32)

    np1 = np.random.randn(2, 3, 4, 5).astype(np.float32)
    input1 = ms.Tensor(np1)
    np2 = np.random.randn(2, 3, 4, 5).astype(np.float32)
    input2 = ms.Tensor(np2)

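    # Compile only: build the graph for net with these inputs; nothing is executed.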
    _cell_graph_executor.compile(net, input_data, input1, input2)


# Test case: reference a module-level global tensor from an @ms_function
g_x = Tensor(np.ones([3, 3]).astype(np.float32))


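# @ms_function compiles the decorated Python function into a MindSpore graph;
# the test below checks that a module-level global tensor can be referenced
# from inside the compiled function.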
@ms_function
def tensor_add_global(x):
    """ tensor_add_global """
    global g_x
    res = tensor_add(x, g_x)
    return res


@non_graph_engine
def test_global_flag():
    """ test_global_flag """
    log.debug("begin test_global_flag")
    x = Tensor(np.ones([3, 3]).astype(np.float32))
    res = tensor_add_global(x)
    log.debug("finished test_global_flag, ret = %r", res)


class NetWithNDarray(nn.Cell):
    """ NetWithNDarray definition """

    def __init__(self, dim):
        super(NetWithNDarray, self).__init__()
        self.softmax = nn.Softmax(dim)
        self.x = ms.Tensor(np.ones(shape=(1,)).astype(np.float32))

    def construct(self, input_data):
        return self.softmax(input_data) * self.x


@non_graph_engine
def test_net_with_ndarray():
    """ test_net_with_ndarray """
    net = NetWithNDarray(0)
    input_data = np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32')

    net(ms.Tensor(input_data))


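# The three tests below register primitives whose bprop deliberately returns
# the wrong number, dtype, or shape of gradients; with check_bprop enabled via
# the enable_check_bprop fixture, taking the gradient must raise.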
def test_bprop_with_wrong_output_num(enable_check_bprop):
    class BpropWithWrongOutputNum(PrimitiveWithInfer):
        @prim_attr_register
        def __init__(self):
            super(BpropWithWrongOutputNum, self).__init__('BpropWithWrongOutputNum')

        def __call__(self, x, y):
            return x

        def infer_shape(self, x_shape, y_shape):
            return x_shape

        def infer_dtype(self, x_type, y_type):
            return x_type

    @bprop_getters.register(BpropWithWrongOutputNum)
    def get_bprop_with_wrong_output_num(self):
        """Generate bprop for BpropWithWrongOutputNum"""

        def bprop(x, y, out, dout):
            return (dout,)

        return bprop

    class BpropWithWrongOutputNumCell(nn.Cell):
        def __init__(self):
            super(BpropWithWrongOutputNumCell, self).__init__()

        def construct(self, x, y):
            return BpropWithWrongOutputNum()(x, y)

    with pytest.raises(ValueError):
        grad_all(BpropWithWrongOutputNumCell())(Tensor(np.array(1).astype(np.int32)),
                                                Tensor(np.array(2).astype(np.int32)))


def test_bprop_with_wrong_output_type(enable_check_bprop):
    class BpropWithWrongOutputType(PrimitiveWithInfer):
        @prim_attr_register
        def __init__(self):
            super(BpropWithWrongOutputType, self).__init__('BpropWithWrongOutputType')

        def __call__(self, x):
            return x

        def infer_shape(self, x_shape):
            return x_shape

        def infer_dtype(self, x_type):
            return x_type

    @bprop_getters.register(BpropWithWrongOutputType)
    def get_bprop_with_wrong_output_type(self):
        """Generate bprop for BpropWithWrongOutputType"""

        def bprop(x, out, dout):
            return (1,)

        return bprop

    class BpropWithWrongOutputTypeCell(nn.Cell):
        def __init__(self):
            super(BpropWithWrongOutputTypeCell, self).__init__()

        def construct(self, x):
            return BpropWithWrongOutputType()(x)

    with pytest.raises(TypeError):
        grad_all(BpropWithWrongOutputTypeCell())(Tensor(np.ones([64, 10]).astype(np.int32)))


def test_bprop_with_wrong_output_shape(enable_check_bprop):
    class BpropWithWrongOutputShape(PrimitiveWithInfer):
        @prim_attr_register
        def __init__(self):
            super(BpropWithWrongOutputShape, self).__init__('BpropWithWrongOutputShape')

        def __call__(self, x):
            return x

        def infer_shape(self, x_shape):
            return x_shape

        def infer_dtype(self, x_type):
            return x_type

    @bprop_getters.register(BpropWithWrongOutputShape)
    def get_bprop_with_wrong_output_shape(self):
        """Generate bprop for BpropWithWrongOutputShape"""
        ones = Tensor(np.ones([2]).astype(np.int32))

        def bprop(x, out, dout):
            return (ones,)

        return bprop

    class BpropWithWrongOutputShapeCell(nn.Cell):
        def __init__(self):
            super(BpropWithWrongOutputShapeCell, self).__init__()

        def construct(self, x):
            return BpropWithWrongOutputShape()(x)

    with pytest.raises(ValueError):
        net = BpropWithWrongOutputShapeCell()
        net.set_grad()
        grad_all(net)(Tensor(np.ones([64, 10]).astype(np.int32)))


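# P.InsertGradientOf(self.save_gradient) registers save_gradient as a hook on
# the gradient flowing through `out`, so the parameter assignment inside
# save_gradient runs during the backward pass built by GradNet below.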
class AssignWhenInsertGrad(nn.Cell):
    """ AssignWhenInsertGrad definition """

    def __init__(self):
        super(AssignWhenInsertGrad, self).__init__()
        self.gather = P.Gather()
        self.damping = Tensor(np.array([0.03, 0.03]).astype(np.float32))
        self.cov_step = ms.Parameter(0, name="cov_step", requires_grad=False)
        self.freq = Tensor(278, ms.int32)
        self.getG = P.InsertGradientOf(self.save_gradient)

    def save_gradient(self, dout):
        self.cov_step = self.cov_step + self.freq
        return dout

    def construct(self, x):
        self.gather(self.damping, self.cov_step, 0)
        out = P.ReLU()(x)
        out = self.getG(out)
        return out


class GradNet(nn.Cell):
    """ Wraps a cell and returns its output together with gradients w.r.t. all inputs """

    def __init__(self, net):
        super(GradNet, self).__init__()
        self.net = net

    def construct(self, *inputs):
        out = self.net(*inputs)
        return out, grad_all(self.net)(*inputs)


def test_assign_in_insert_grad():
    context.set_context(mode=context.GRAPH_MODE)
    net = AssignWhenInsertGrad().to_float(ms.float16)
    input_data = np.array([[1.2, 2.1], [2.2, 3.2]]).astype('float32')
    net_back = GradNet(net)
    net_back(ms.Tensor(input_data))


class Assign(nn.Cell):
    """ Assign definition """

    def __init__(self):
        super(Assign, self).__init__()
        self.cov_step = ms.Parameter(0.0, name="cov_step", requires_grad=False)

    def construct(self, x):
        self.cov_step = self.cov_step + x
        return self.cov_step


def test_assign(enable_check_bprop):
    context.set_context(mode=context.GRAPH_MODE)
    net = Assign()
    input_data = ms.Tensor(np.array(1).astype(np.int32))
    net_back = GradNet(net)
    net_back(input_data)


class AssignCheck(nn.Cell):
    """ AssignCheck definition """

    def __init__(self):
        super(AssignCheck, self).__init__()
        self.cov_step = ms.Parameter(0.0, name="cov_step", requires_grad=False)

    def construct(self, x):
        self.cov_step = x
        return self.cov_step


def test_assign_check_none():
    context.set_context(mode=context.GRAPH_MODE)
    net = AssignCheck()
    with pytest.raises(TypeError):
        net(None)
337