• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2022 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15import numpy as np
16import pytest
17import mindspore.context as context
18import mindspore.common.dtype as mstype
19from mindspore import Tensor, ops, ParameterTuple, mutable
20from mindspore.ops.composite import GradOperation
21from mindspore.nn import Cell
22
23
class _Grad(Cell):
    """Generic gradient wrapper that applies a GradOperation to a network.

    Args:
        grad: A GradOperation instance; its `sens_param` flag decides whether
            a sensitivity (output-gradient) value is appended to the inputs.
        network: The Cell to differentiate.
        wrt_params: If True, also differentiate w.r.t. the network's
            trainable parameters.
        real_inputs_count: Number of leading `inputs` that are real network
            inputs; the rest are treated as sensitivity values. When None,
            all inputs are forwarded unchanged.
    """

    def __init__(self, grad, network, wrt_params=False, real_inputs_count=None):
        super().__init__()
        self.network = network
        self.grad = grad
        self.sens_param = self.grad.sens_param
        self.wrt_params = wrt_params
        self.real_inputs_count = real_inputs_count
        if self.wrt_params:
            # Wrap in ParameterTuple so the grad op can track the weights.
            self.params = ParameterTuple(self.network.trainable_params())

    def construct(self, *inputs):
        # Simple case: no sens splitting needed, forward everything as-is.
        if self.real_inputs_count is None or self.sens_param is False:
            if self.wrt_params:
                return self.grad(self.network, self.params)(*inputs)
            return self.grad(self.network)(*inputs)

        # Split trailing sens values from the real network inputs and pass
        # them as a single tuple sens argument.
        real_inputs = inputs[:self.real_inputs_count]
        sense_param_inputs = inputs[self.real_inputs_count:]
        if self.wrt_params:
            return self.grad(self.network, self.params)(*real_inputs, sense_param_inputs)
        return self.grad(self.network)(*real_inputs, sense_param_inputs)
46
47
class GradOfFirstInput(_Grad):
    """Gradient network computing d(output)/d(first input)."""

    def __init__(self, network, sens_param=True, real_inputs_count=None):
        grad_op = GradOperation(sens_param=sens_param)
        super().__init__(grad=grad_op, network=network,
                         real_inputs_count=real_inputs_count)
56
57
class CommonFunc():
    """Driver comparing a MindSpore net against a NumPy reference under a
    dynamic-shape input.

    Args:
        ms_net: MindSpore Cell under test.
        np_net: Callable NumPy reference implementation.
        input_np: Concrete numpy array fed to both nets.
        input_dyn: Tensor with a partially-unknown shape, registered via
            `set_inputs` to force dynamic-shape compilation.
    """

    def __init__(self, ms_net, np_net, input_np, input_dyn):
        # Zero-arg super() — consistent with DynamicRankCommonFunc; the
        # old super(CommonFunc, self) spelling is redundant in Python 3.
        super().__init__()
        self.ms_net = ms_net
        self.ms_net.set_inputs(input_dyn)
        self.ms_net.set_grad()
        self.np_net = np_net

        self.input_np = input_np
        self.input_np_t = Tensor(input_np)
        # Placeholder until forward_cmp() runs; grad_impl() reuses it as the
        # sens value, so forward_cmp() is expected to be called first.
        self.out_np = np.array(1).astype(input_np.dtype)

    def forward_cmp(self):
        """Run both nets and assert elementwise equality of the outputs."""
        out_ms = self.ms_net(self.input_np_t)
        self.out_np = self.np_net(self.input_np)
        assert np.all(out_ms.asnumpy() == self.out_np)

    def grad_impl(self):
        """Run the backward pass using the forward output as sens."""
        grad_net = GradOfFirstInput(self.ms_net)
        grad_net.set_train()
        grad_net(self.input_np_t, Tensor(self.out_np))
79
80
class DynamicRankCommonFunc():
    """Driver comparing a MindSpore net with a NumPy reference when the
    reduction axis — and hence the output rank — is only known at run time."""

    def __init__(self, ms_net, np_net, input_np, axis_np):
        super().__init__()
        self.ms_net = ms_net
        self.input_np_t = Tensor(input_np)
        self.axis_np_t = Tensor(axis_np)
        # A dynamic-length axis tensor makes the reduced rank unknown at
        # compile time, exercising the dynamic-rank code path.
        dyn_axis = Tensor(shape=(None,), dtype=self.axis_np_t.dtype)
        self.ms_net.set_inputs(self.input_np_t, dyn_axis)
        self.ms_net.set_grad()
        self.np_net = np_net

        self.input_np = input_np
        self.axis_np = axis_np

        # Overwritten with the real forward result by forward_cmp().
        self.out_np = np.array(1).astype(input_np.dtype)

    def forward_cmp(self):
        """Check the MindSpore forward result against the NumPy reference."""
        ms_out = self.ms_net(self.input_np_t, self.axis_np_t)
        self.out_np = self.np_net(self.input_np, self.axis_np)
        assert np.allclose(ms_out.asnumpy(), self.out_np, rtol=0.0001)

    def grad_impl(self):
        """Drive the backward graph, feeding the forward output as sens."""
        bwd_net = GradOfFirstInput(self.ms_net)
        bwd_net.set_train()
        bwd_net(self.input_np_t, self.axis_np_t, Tensor(self.out_np))
106
107
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_ellipsis():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is ellipsis.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            x = x[...]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[...]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # First dim is None to force dynamic-shape compilation.
    dynamic_input = Tensor(shape=(None,), dtype=mstype.float32)
    input_np = np.random.randn(4).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
144
145
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_bool():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is bool.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Indexing with True prepends a length-1 dimension.
            x = x[True]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[True]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 3.
    dynamic_input = Tensor(shape=(None, 3), dtype=mstype.float32)
    input_np = np.random.randn(2, 3).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
182
183
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_none():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is None.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # None inserts a new length-1 axis (numpy.newaxis semantics).
            x = x[None]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[None]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 3.
    dynamic_input = Tensor(shape=(None, 3), dtype=mstype.float32)
    input_np = np.random.randn(2, 3).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
220
221
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_tensor():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is tensor of int.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            # Integer tensor used for advanced (fancy) indexing.
            self.index = Tensor([0, 1])

        def construct(self, x):
            index = self.index
            x = x[index]
            return x

    # NumPy reference: same fancy index expressed as a Python list.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[[0, 1]]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 4.
    dynamic_input = Tensor(shape=(None, 4), dtype=mstype.float32)
    input_np = np.random.randn(3, 4).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
263
264
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_tensor_001():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is dynamic shape tensor.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            self.unique = ops.Unique()
            self.index = Tensor([1, 1, 1, 2])

        def construct(self, x):
            # Unique's output length is value-dependent, so the index
            # tensor itself has a dynamic shape.
            index = self.unique(self.index)[0]
            x = x[index]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            index = np.unique(np.array([1, 1, 1, 2]))
            x = x[index]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 3.
    dynamic_input = Tensor(shape=(None, 3), dtype=mstype.float32)
    input_np = np.random.randn(3, 3).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
308
309
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_slice():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is slice.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            x = x[2:4]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[2:4]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 4.
    dynamic_input = Tensor(shape=(None, 4), dtype=mstype.float32)
    input_np = np.random.randn(6, 4).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
346
347
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_slice():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is slice.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            # Reducing over a runtime axis tensor makes x's rank dynamic.
            x = ops.reduce_sum(x, axis)
            x = x[2:4]
            return x

    # NumPy reference: summing axis 0 twice matches reducing axes (0, 1).
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.sum(axis=axis[0]).sum(axis=axis[0])
            x = x[2:4]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.random.randn(3, 6, 4).astype(np.float32)
    axis_np = np.array([0, 1])

    # NOTE(review): PyNative mode runs only forward here — presumably the
    # dynamic-rank backward path was not supported; confirm before extending.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    fact.grad_impl()
385
386
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_with_single_basic_index():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is ellipsis/None/Integer/True.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            # Reducing over a runtime axis tensor makes x's rank dynamic;
            # then apply each basic index form in turn.
            x = ops.reduce_sum(x, axis)
            x = x[...]
            x = x[1:4:2]
            x = x[None]
            x = x[True]
            return x

    # NumPy reference: summing axis 0 twice matches reducing axes (0, 1).
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.sum(axis=axis[0]).sum(axis=axis[0])
            x = x[...]
            x = x[1:4:2]
            x = x[None]
            x = x[True]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.random.randn(3, 6, 4, 5).astype(np.int64)
    axis_np = np.array([0, 1])

    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    fact.grad_impl()
427
428
@pytest.mark.skip(reason="Need to be fixed.")
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_tuple_with_basic_index():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is tuple (integer, slice, ellipsis, None).
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            x = ops.reduce_sum(x, axis)
            # Mix a static shape bound and a runtime (dyn_shape) bound in
            # the same tuple index.
            x_tensor_shape = ops.dyn_shape(x)[0]
            x_shape = x.shape[0]
            x0 = x[1:x_shape:2, 1:x_tensor_shape:2, ..., x_shape-2, None]
            return x0

    # NumPy reference; x_shape-2 evaluates to 1 for this fixture.
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.sum(axis=axis[0])
            x_shape = x.shape[0]
            x0 = x[1:x_shape:2, 1:x_shape:2, ..., 1, None]
            return x0

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.random.randn(2, 3, 4, 5, 6).astype(np.float32)
    axis_np = np.array([0])

    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    fact.grad_impl()
465
466
@pytest.mark.skip(reason="Need to be fixed.")
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_with_tensor_index():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is tensor.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            # reduce_min over a runtime axis tensor makes x's rank dynamic.
            x = ops.reduce_min(x, axis)
            x = x[Tensor([1, 1])]
            x = x[Tensor([True, False])]
            return x

    # NumPy reference mirroring Net.construct with list indices.
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.min(axis=axis[0]).min(axis=axis[0])
            x = x[[1, 1]]
            x = x[[True, False]]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.ones((3, 6, 4, 4)).astype(np.int64)
    axis_np = np.array([0, 1])

    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    fact.grad_impl()
504
505
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_tuple_with_multi_tensor_index():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is multiple tensors.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            x = ops.reduce_min(x, axis)
            # Tuple index mixing an int tensor, a slice and a bool tensor.
            x0 = x[Tensor(np.ones((25), int)), :,
                   Tensor(np.ones((5, 5), bool))]
            # Second getitem uses a value computed from the data itself.
            x0 = x0[x0.min(), 0:1]
            return x0

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.min(axis=axis[0])
            x0 = x[np.ones((25), int), :, np.ones((5, 5), bool)]
            x0 = x0[x0.min(), 0:1]
            return x0

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.ones((3, 6, 5, 5, 5)).astype(np.int64)
    axis_np = np.array([0])

    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    fact.grad_impl()
541
542
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_with_list_index():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is List.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            x = ops.reduce_min(x, axis)
            # mutable() marks the list as a variable, not a constant.
            index = mutable([1, 2])
            x = x[index]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.min(axis=axis[0]).min(axis=axis[0])
            x = x[[1, 2]]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.ones((3, 6, 3, 4)).astype(np.int64)
    axis_np = np.array([0, 1])

    # Forward-only check; no grad_impl() here.
    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
577
578
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_dynamic_rank_getitem_tuple_with_mix_index():
    """
    Feature: Test Tensor slice for dynamic rank in feed mode.
    Description: The input shape is dynamic and the tensor index is tuple
     (integer, slice, ellipsis, tensor, bool ,list).
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x, axis):
            x = ops.reduce_min(x, axis)
            # One tuple index mixing tensor, int, ellipsis, list and None.
            x0 = x[Tensor(1), 1, ..., [1, 2], None]
            return x0

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x, axis):
            x = x.min(axis=axis[0])
            x0 = x[np.array(1), 1, ..., [1, 2], None]
            return x0

    net_ms = Net()
    net_np = NumpyNet()
    input_np = np.random.randn(3, 4, 5, 6, 7, 8).astype(np.int64)
    axis_np = np.array([0])

    context.set_context(mode=context.GRAPH_MODE)
    fact = DynamicRankCommonFunc(net_ms, net_np, input_np, axis_np)
    fact.forward_cmp()
    fact.grad_impl()
612
613
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_slice_001():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is slice with negative int.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Negative bounds must be resolved against the runtime length.
            x = x[-3:-1]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[-3:-1]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 4.
    dynamic_input = Tensor(shape=(None, 4), dtype=mstype.float32)
    input_np = np.random.randn(6, 4).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
650
651
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_int():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is int.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Negative index resolved against the runtime length.
            x = x[-3]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[-3]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 4.
    dynamic_input = Tensor(shape=(None, 4), dtype=mstype.float32)
    input_np = np.random.randn(3, 4).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
688
689
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_int_001():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is int with control flow.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            self.extra = 0

        def construct(self, x):
            # self.extra is 0, so this branch always selects index 2;
            # the conditional exercises control flow in the graph.
            index = 1 if self.extra > 1 else 2
            x = x[index]
            return x

    # NumPy reference: the taken branch of Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[2]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Leading dim is dynamic; trailing dim fixed at 2.
    dynamic_input = Tensor(shape=(None, 2), dtype=mstype.float32)
    input_np = np.random.randn(3, 2).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
731
732
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_int_002():
    """
    Feature: Test Tensor slice for twice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is int.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Two chained int getitems.
            x = x[3][4]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[3][4]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # First two dims are dynamic; last dim fixed at 3.
    dynamic_input = Tensor(shape=(None, None, 3), dtype=mstype.float32)
    input_np = np.random.randn(5, 5, 3).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
769
770
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_list():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is list of bool and int.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Mixed bool/int list index (False behaves as index 0 here).
            index = [False, 1]
            x = x[index]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            index = [False, 1]
            x = x[index]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Single dynamic dimension.
    dynamic_input = Tensor(shape=(None,), dtype=mstype.float32)
    input_np = np.random.randn(5).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
809
810
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_tuple():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is tuple of tensor and slice.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            # Scalar tensors used as the int index and as slice bounds.
            self.extra = Tensor(0)
            self.extra2 = Tensor(2)

        def construct(self, x):
            x = x[self.extra, self.extra:self.extra2, ...]
            return x

    # NumPy reference: same index with the tensor values substituted.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[0, 0:2, ...]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Only the middle dim is dynamic.
    dynamic_input = Tensor(shape=(2, None, 3), dtype=mstype.float32)
    input_np = np.random.randn(2, 4, 3).astype(np.float32)

    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
847
848
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_tuple_001():
    """
    Feature: Test Tensor slice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is tuple of advanced indices.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Tuple mixing ellipsis, bool, int, slice and None.
            index = (..., True, 4, slice(0, 2), None)
            x = x[index]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            index = (..., True, 4, slice(0, 2), None)
            x = x[index]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Third dim is dynamic.
    dynamic_input = Tensor(shape=(3, 4, None, 2), dtype=mstype.float32)
    input_np = np.random.randn(3, 4, 5, 2).astype(np.float32)

    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
883
884
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_tuple_002():
    """
    Feature: Test Tensor slice for twice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is tuple of advanced indices.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def __init__(self):
            super().__init__()
            self.extra = Tensor([2, 3])

        def construct(self, x):

            # Two chained getitems: bool+list fancy index, then
            # ellipsis + tensor fancy index.
            x = x[True, [1, 2]][..., self.extra]
            return x

    # NumPy reference: same indices with the tensor spelled as a list.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[True, [1, 2]][..., [2, 3]]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Dims 0 and 4 are dynamic.
    dynamic_input = Tensor(shape=(None, 4, 5, 2, None),
                           dtype=mstype.float32)
    input_np = np.random.randn(3, 4, 5, 2, 4).astype(np.float32)

    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
923
924
@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_dynamic_getitem_tuple_003():
    """
    Feature: Test Tensor slice for twice for dynamic shape in feed mode.
    Description: The input shape is dynamic and the tensor index is tuple of advanced indices.
    Expectation: Assert the result is equal the numpy result.
    """
    class Net(Cell):
        def construct(self, x):
            # Full slices on the leading dims, bounded slice on dim 3.
            x = x[:, :, :, :1]
            return x

    # NumPy reference mirroring Net.construct.
    class NumpyNet():
        @classmethod
        def __call__(cls, x):
            x = x[:, :, :, :1]
            return x

    net_ms = Net()
    net_np = NumpyNet()
    # Dims 1, 3 and 5 are dynamic.
    dynamic_input = Tensor(shape=(4, None, 5, None, 6, None),
                           dtype=mstype.float32)
    input_np = np.random.randn(4, 4, 5, 5, 6, 4).astype(np.float32)

    # Verify forward and backward in both PyNative and Graph mode.
    context.set_context(mode=context.PYNATIVE_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
    context.set_context(mode=context.GRAPH_MODE)
    fact = CommonFunc(net_ms, net_np, input_np, dynamic_input)
    fact.forward_cmp()
    fact.grad_impl()
962