1# Copyright 2020 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15
16import numpy as np
17import pytest
18
19import mindspore.context as context
20import mindspore.nn as nn
21from mindspore import Tensor
22from mindspore.ops import composite as C
23from mindspore.ops import operations as P
24
25
class LogSoftmax(nn.Cell):
    """Minimal cell wrapping the ``P.LogSoftmax`` primitive.

    Args:
        axis (int): axis along which log-softmax is computed. Default: 1.
    """

    def __init__(self, axis=1):
        super(LogSoftmax, self).__init__()
        self.logsoftmax = P.LogSoftmax(axis)

    def construct(self, x):
        out = self.logsoftmax(x)
        return out
33
34
class Grad(nn.Cell):
    """Wraps *network* and returns the gradients w.r.t. all of its inputs.

    ``sens_param=True`` means the caller supplies the output sensitivity
    (upstream gradient) explicitly as the second ``construct`` argument.
    """

    def __init__(self, network):
        super(Grad, self).__init__()
        self.network = network
        self.grad = C.GradOperation(get_all=True, sens_param=True)

    def construct(self, input_data, sens):
        return self.grad(self.network)(input_data, sens)
44
45
def test_logsoftmax():
    """Check LogSoftmax forward (axis=1) against an analytic NumPy reference.

    The expected value is computed from the definition
    ``log_softmax(x) = x - log(sum(exp(x), axis))`` using the max-shift
    trick for numerical stability, instead of a hard-coded golden array —
    the reference stays correct if the input is ever changed.
    """
    x = np.array([[-0.08082921, -0.13706027, -0.4711177, -0.05606057],
                  [-0.46082982, 1.1761844, -1.016654, -1.743829],
                  [-1.5062045, 0.6910976, 0.4839723, 1.1502692]]).astype(np.float32)
    # Stable log-softmax reference along axis 1 (the wrapper's default axis).
    shifted = x - x.max(axis=1, keepdims=True)
    expect = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
    logSoftmax = LogSoftmax()
    output = logSoftmax(Tensor(x))
    assert np.allclose(output.asnumpy(), expect)
56
57
def test_logsoftmaxgrad():
    """Check LogSoftmax backward (axis=1) against the analytic gradient.

    For ``y = log_softmax(x)`` the input gradient is
    ``dx = dy - softmax(x) * sum(dy, axis)``; computing it here from the
    same ``x``/``dy`` inputs replaces an opaque hard-coded golden array.
    """
    x = np.array([[-0.47705367, 0.48267725, -1.0453935, 1.574488, 0.20362134, 0.4435456, -0.23984082, -0.43684655,
                   -0.7725506, 1.4481013],
                  [1.1012247, 1.7069651, 0.55062026, 0.3361901, -1.1082426, -0.5001939, -0.3255393, -0.7972024,
                   -0.27965206, -0.702805],
                  [0.19450496, 0.87596166, 0.6467245, -1.044987, 0.5248943, -2.6166635, 1.6719198, 0.06600758,
                   -0.4099178, 1.1861311],
                  [1.1305193, -1.97308, 2.1047623, -1.5105937, 0.93052036, 1.2467804, 0.5310002, 0.7084912, -1.3681422,
                   -0.9686862],
                  [1.871408, 0.14219497, -0.41050452, -0.749807, 1.4900619, -1.8172716, -0.73839617, 0.17565694,
                   -0.4553867, -1.5423119]]).astype(np.float32)
    dy = np.array([[1.516363, -0.15196544, 0.598733, 0.64357865, 0.16265012, -1.3521105, 0.22621834, 0.7168259,
                    -0.6709239, 0.79757756],
                   [-0.32457778, 1.2831115, 1.1211495, -0.02665559, 1.9170904, -1.3397789, 1.4124829, -1.4298155,
                    0.758519, -0.25322974],
                   [-0.24226122, -1.2555921, 0.6492511, -0.34847677, 0.19916506, 0.628554, -0.19658111, 0.44939864,
                    -0.11677749, -1.2131723],
                   [0.24267715, 0.28106326, 1.1075432, -0.29006946, 0.31335673, 0.8833154, 0.13152207, 1.5482179,
                    0.29770762, -0.16246222],
                   [0.02145994, 0.80424, -0.95061, 1.5875458, -0.00308682, 0.17964548, 0.49912593, 0.46977136,
                    0.2151897, 0.30908248]]).astype(np.float32)
    # Analytic reference: dx = dy - softmax(x) * sum(dy, axis=1).
    exps = np.exp(x - x.max(axis=1, keepdims=True))  # max-shift for stability
    softmax = exps / exps.sum(axis=1, keepdims=True)
    expect = dy - softmax * dy.sum(axis=1, keepdims=True)
    net = LogSoftmax()
    dx = Grad(net)(Tensor(x), Tensor(dy))
    assert np.allclose(dx[0].asnumpy(), expect)
92
93
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_logsoftmax_gpu():
    """Forward test on GPU in graph mode with graph-kernel fusion enabled."""
    context.set_context(mode=context.GRAPH_MODE,
                        enable_graph_kernel=True,
                        device_target="GPU")
    test_logsoftmax()
100
101
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_logsoftmaxgrad_gpu():
    """Backward test on GPU in graph mode with graph-kernel fusion enabled."""
    context.set_context(mode=context.GRAPH_MODE,
                        enable_graph_kernel=True,
                        device_target="GPU")
    test_logsoftmaxgrad()
108
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_logsoftmax_asend():
    """Forward test on Ascend in graph mode with graph-kernel fusion enabled.

    NOTE(review): "asend" is a typo for "ascend"; the name is kept so any
    CI selection by test name keeps working.
    """
    context.set_context(mode=context.GRAPH_MODE,
                        enable_graph_kernel=True,
                        device_target="Ascend")
    test_logsoftmax()
116
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_logsoftmaxgrad_asend():
    """Backward test on Ascend in graph mode with graph-kernel fusion enabled.

    NOTE(review): "asend" is a typo for "ascend"; the name is kept so any
    CI selection by test name keeps working.
    """
    context.set_context(mode=context.GRAPH_MODE,
                        enable_graph_kernel=True,
                        device_target="Ascend")
    test_logsoftmaxgrad()
124