# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import pytest
import numpy as np
from mindspore.ops import operations as P
import mindspore.nn as nn
from mindspore.common.parameter import Parameter
from tests.mindspore_test_framework.utils.check_gradient import (
    check_jacobian, Tensor, OperationGradChecker, check_gradient, NNGradChecker)


@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_operation_grad_checker():
    """
    Feature: Auto diff.
    Description: Check the gradients of a MatMul-based cell with OperationGradChecker.
    Expectation: Analytic gradients match the numerical estimates within tolerance.
    """
    class Net(nn.Cell):
        """ Net definition """

        def __init__(self):
            super(Net, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            x = x * self.z
            out = self.matmul(x, y)
            return out

    # Compare analytic gradients with numerical estimates for the selected input.
    check_gradient(Net(), Tensor(np.array([[0.65, 0.8, 0.8]], np.float32)),
                   Tensor(np.array([[0.1], [0.2], [-.1]], np.float32)), grad_checker_class=OperationGradChecker,
                   input_selector=[1], sampling_times=2)


@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_grad_checker_primitive():
    """
    Feature: Auto diff.
    Description: Check the gradients of a MatMul primitive function with OperationGradChecker.
    Expectation: Analytic gradients match the numerical estimates within tolerance.
    """
    matmul = P.MatMul()

    def prim_f(x, y):
        return matmul(x, y)

    # Gradient check on a plain function wrapping a primitive rather than a Cell.
    check_gradient(prim_f, Tensor(np.array([[0.65, 0.8, 0.8]], np.float32)),
                   Tensor(np.array([[0.1], [0.2], [-.1]], np.float32)),
                   grad_checker_class=OperationGradChecker, sampling_times=2)


@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_nn_jacobian_checker():
    """
    Feature: Auto diff.
    Description: Check the Jacobian of a Dense cell with NNGradChecker.
    Expectation: The analytic Jacobian matches the numerical estimate within tolerance.
    """
    class Net(nn.Cell):
        """ Net definition """

        def __init__(self):
            super(Net, self).__init__()
            self.dense = nn.Dense(10, 10)

        def construct(self, x):
            out = self.dense(x)
            return out, x

    # Check the Jacobian of the selected output w.r.t. the selected input
    # against a finite-difference estimate.
    check_jacobian(Net(), Tensor(np.random.rand(1, 10).astype(np.float32)),
                   delta=1e-3,
                   max_error=1e-7,
                   grad_checker_class=NNGradChecker,
                   input_selector=[1],
                   output_selector=[0])


@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_nn_grad_checker():
    """
    Feature: Auto diff.
    Description: Check the gradients of a Dense cell with NNGradChecker.
    Expectation: Analytic gradients match the numerical estimates within tolerance.
    """
    class Net(nn.Cell):
        """ Net definition """

        def __init__(self):
            super(Net, self).__init__()
            self.dense = nn.Dense(10, 10)

        def construct(self, x):
            out = self.dense(x)
            return out

    # Compare analytic gradients of the Dense cell with finite-difference estimates.
    check_gradient(Net(), Tensor(np.random.rand(1, 10).astype(np.float32)),
                   delta=1e-3,
                   max_error=1e-3,
                   grad_checker_class=NNGradChecker, sampling_times=3)


@pytest.mark.level1
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_operation_jacobian_checker():
    """
    Feature: Auto diff.
    Description: Check the Jacobian of a multi-output MatMul-based cell with OperationGradChecker.
    Expectation: The analytic Jacobian matches the numerical estimate within tolerance.
    """
    class Net(nn.Cell):
        """ Net definition """

        def __init__(self):
            super(Net, self).__init__()
            self.matmul = P.MatMul()
            self.z = Parameter(Tensor(np.array([1.0], np.float32)), name='z')

        def construct(self, x, y):
            x = x * self.z
            out = self.matmul(x, y)
            return x, out

    # Check the Jacobian of the selected output w.r.t. the selected input.
    check_jacobian(Net(), Tensor(np.array([[0.65, 0.8, 0.8], [0.1, 0.2, 0.3]], np.float32)),
                   Tensor(np.array([[0.1, 0.3], [0.2, 0.2], [-.1, 0.4]], np.float32)),
                   grad_checker_class=OperationGradChecker, input_selector=[0],
                   output_selector=[0])