• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2024 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15# pylint: disable=unused-variable
16import pytest
17import numpy as np
18import mindspore as ms
19from mindspore.common import dtype as mstype
20from mindspore import ops, mint, Tensor, jit, JitConfig, context, nn
21from tests.st.ops.dynamic_shape.test_op_utils import TEST_OP
22
23
class FullNet(nn.Cell):
    """Thin Cell wrapper around mint.full, used by the dynamic-rank forward test."""

    def __init__(self):
        super().__init__()
        # Keep a handle on the functional op so construct is a single call.
        self.full = mint.full

    def construct(self, size, fill_value, dtype):
        """Return a tensor of shape `size` filled with `fill_value`, cast to `dtype`."""
        filled = self.full(size, fill_value, dtype=dtype)
        return filled
31
class FullGradNet(nn.Cell):
    """Cell that returns the gradients of mint.full w.r.t. all of its inputs."""

    def __init__(self):
        super().__init__()
        # get_all=True: gradients for every positional input, not just the first.
        self.grad_func = ops.GradOperation(get_all=True)

    def wrap_full(self, size, fill_value, dtype):
        # mint.full is wrapped so GradOperation has a plain callable to differentiate.
        return mint.full(size, fill_value, dtype=dtype)

    def construct(self, size, fill_value, dtype):
        grad_fn = self.grad_func(self.wrap_full)
        return grad_fn(size, fill_value, dtype)
43
44
def full_forward_func(size, fill_value, dtype=None):
    """Forward wrapper: build a tensor of shape `size` filled with `fill_value`."""
    return mint.full(size, fill_value, dtype=dtype)
48
49
def full_backward_func(size, fill_value, dtype=None):
    """Return the gradients of full_forward_func w.r.t. `size` and `fill_value`."""
    grads = ops.grad(full_forward_func, (0, 1))(size, fill_value, dtype=dtype)
    return grads[0], grads[1]
53
54
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.parametrize('mode', ['GE', 'pynative', 'KBK'])
def test_full_forward(mode):
    """
    Feature: Ops.
    Description: test full.
    Expectation: expect correct result.
    """
    size = (1, 2, 3)
    value = 6.
    dtype = None
    # Expected: every element of the (1, 2, 3) output equals the fill value.
    expect_y = np.full(size, value)
    if mode == 'pynative':
        ms.context.set_context(mode=ms.PYNATIVE_MODE)
        y = full_forward_func(size, value, dtype)
    else:
        # KBK compiles at O0, GE at O2; the call itself is identical.
        level = "O0" if mode == 'KBK' else "O2"
        jitted = jit(full_forward_func, jit_config=JitConfig(jit_level=level))
        y = jitted(size, value, dtype)
    np.testing.assert_allclose(y.asnumpy(), expect_y, rtol=1e-5)
79
80
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.parametrize('mode', ['pynative', 'KBK']) # GE is unsupported for now.
def test_full_backward(mode):
    """
    Feature: Ops.
    Description: test full backward.
    Expectation: expect correct result.
    """
    size = Tensor(np.array([1, 2, 3]).astype(np.int64))
    value = Tensor(6)
    dtype = mstype.int32
    # The integer size input gets a zero gradient; the scalar fill value
    # accumulates one unit of gradient per output element (1*2*3 = 6).
    expect_size_grad = 0
    expect_value_grad = 6
    if mode == 'pynative':
        ms.context.set_context(mode=ms.PYNATIVE_MODE)
        backward = full_backward_func
    else:
        level = "O0" if mode == 'KBK' else "O2"
        backward = jit(full_backward_func, jit_config=JitConfig(jit_level=level))
    size_grad, value_grad = backward(size, value, dtype)
    np.testing.assert_allclose(size_grad.asnumpy(), expect_size_grad, rtol=1e-5)
    np.testing.assert_allclose(value_grad.asnumpy(), expect_value_grad, rtol=1e-5)
    # The gradient of a scalar fill value must itself be a scalar.
    assert value_grad.shape == ()
107
108
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
def test_full_dynamic_shape():
    """
    Feature: Test full with dynamic shape in graph mode.
    Description: call ops.mint.full with valid input and index.
    Expectation: return the correct value.
    """
    # Tensor-typed size/value pairs exercise the dynamic-shape path.
    tensor_case_a = [Tensor(np.array([1, 2, 3]).astype(np.int64)), Tensor([5])]
    tensor_case_b = [Tensor(np.array([1, 2, 3, 4]).astype(np.int64)), Tensor(6)]
    TEST_OP(full_forward_func, [tensor_case_a, tensor_case_b], '', disable_input_check=True,
            disable_yaml_check=True, disable_tensor_dynamic_type='DYNAMIC_RANK')

    # Plain-Python size/value pairs: no tensors to differentiate, so grads are off.
    python_case_a = [(1, 2, 3), 5]
    python_case_b = [(4, 3, 2), 6]
    TEST_OP(full_forward_func, [python_case_a, python_case_b], '', disable_input_check=True,
            disable_yaml_check=True, disable_grad=True)
134
135
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.parametrize("context_mode", [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
def test_full_forward_dynamic_rank(context_mode):
    """
    Feature: full ops.
    Description: test ops full with dynamic shape tensor input.
    Expectation: output the right result.
    """
    context.set_context(mode=context_mode)
    net = FullNet()
    # shape=None declares the first two inputs as dynamic-rank.
    net.set_inputs(Tensor(shape=None, dtype=mstype.int64),
                   Tensor(shape=None, dtype=mstype.float32),
                   ms.int32)
    out = net(Tensor(np.array([2, 3]).astype(np.int64)),
              Tensor(np.array([3]).astype(np.float32)),
              ms.int32)
    assert np.allclose(out.asnumpy(), np.full((2, 3), 3, np.int32))

    # A 2-D size tensor / multi-element fill value is invalid and must be rejected.
    with pytest.raises((TypeError, ValueError)):
        bad_size = Tensor(np.array([[2, 3], [4, 5]]).astype(np.int64))
        bad_value = Tensor(np.array([3, 4]).astype(np.float32))
        _ = net(bad_size, bad_value, ms.int32)
162
163
@pytest.mark.level1
@pytest.mark.env_onecard
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.parametrize("context_mode", [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
def test_full_backward_dynamic_rank(context_mode):
    """
    Feature: full ops.
    Description: test ops full with dynamic shape tensor input.
    Expectation: output the right result.
    """
    context.set_context(mode=context_mode)
    net = FullGradNet()
    # shape=None declares the first two inputs as dynamic-rank.
    net.set_inputs(Tensor(shape=None, dtype=ms.int64),
                   Tensor(shape=None, dtype=mstype.float32),
                   ms.int32)
    dsize, dvalue = net(Tensor(np.array([2, 3]).astype(np.int64)),
                        Tensor(np.array([3]).astype(np.float32)),
                        ms.int32)
    # Integer size input -> zero gradient; a (2, 3) output sums six units of
    # gradient back into the single-element fill value.
    assert np.allclose(dsize.asnumpy(), 0)
    assert np.allclose(dvalue.asnumpy(), [6])

    # Invalid 2-D size tensor / multi-element value must raise.
    with pytest.raises((TypeError, ValueError)):
        bad_size = Tensor(np.array([[2, 3], [4, 5]]).astype(np.int64))
        bad_value = Tensor(np.array([2, 3]).astype(np.float32))
        _ = net(bad_size, bad_value, ms.int32)
192