# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore.ops import operations as P

@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_logsoftmax():
    """Forward check: P.LogSoftmax on CPU against precomputed values."""
    x = np.array([[-0.08082921, -0.13706027, -0.4711177, -0.05606057],
                  [-0.46082982, 1.1761844, -1.016654, -1.743829],
                  [-1.5062045, 0.6910976, 0.4839723, 1.1502692]]).astype(np.float32)
    expect = np.array([[-1.2939762, -1.3502073, -1.6842647, -1.2692076],
                       [-1.9445671, -0.3075528, -2.5003912, -3.2275662],
                       [-3.452001, -1.2546989, -1.4618242, -0.79552734]]).astype(np.float32)

    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    log_softmax = P.LogSoftmax()
    output = log_softmax(Tensor(x))
    # Element-wise absolute error must stay within the 1e-5 tolerance.
    diff = np.abs(output.asnumpy() - expect)
    err = np.ones(shape=expect.shape) * 1.0e-5
    assert np.all(diff < err)

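# A minimal NumPy cross-check of the table above, assuming P.LogSoftmax
# follows the textbook definition log_softmax(x) = x - logsumexp(x).
# The helper (and its name) is illustrative, not part of the original suite.
def np_logsoftmax(x, axis=-1):
    # Shift by the max along `axis` for numerical stability.
    shifted = x - np.max(x, axis=axis, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))
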
class LogSoftmax(nn.Cell):
    """Thin Cell wrapper around P.LogSoftmax so the op can be differentiated."""

    def __init__(self, axis=-1):
        super(LogSoftmax, self).__init__()
        self.logsoftmax = P.LogSoftmax(axis)

    def construct(self, x):
        return self.logsoftmax(x)


class Grad(nn.Cell):
    """Returns the gradients of `network` with respect to all of its inputs.

    With get_all=True and sens_param=True, Grad(net)(x, sens) yields a tuple
    with one gradient per input, so dx[0] in the tests below is d(out)/dx.
    """

    def __init__(self, network):
        super(Grad, self).__init__()
        self.grad = C.GradOperation(get_all=True, sens_param=True)
        self.network = network

    def construct(self, input_data, sens):
        return self.grad(self.network)(input_data, sens)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_logsoftmaxgrad():
    """Backward check: LogSoftmax gradient along the default axis (-1) on CPU."""
    x = np.array([[-0.47705367, 0.48267725, -1.0453935, 1.574488, 0.20362134, 0.4435456, -0.23984082, -0.43684655,
                   -0.7725506, 1.4481013],
                  [1.1012247, 1.7069651, 0.55062026, 0.3361901, -1.1082426, -0.5001939, -0.3255393, -0.7972024,
                   -0.27965206, -0.702805],
                  [0.19450496, 0.87596166, 0.6467245, -1.044987, 0.5248943, -2.6166635, 1.6719198, 0.06600758,
                   -0.4099178, 1.1861311],
                  [1.1305193, -1.97308, 2.1047623, -1.5105937, 0.93052036, 1.2467804, 0.5310002, 0.7084912, -1.3681422,
                   -0.9686862],
                  [1.871408, 0.14219497, -0.41050452, -0.749807, 1.4900619, -1.8172716, -0.73839617, 0.17565694,
                   -0.4553867, -1.5423119]]).astype(np.float32)
    dy = np.array([[1.516363, -0.15196544, 0.598733, 0.64357865, 0.16265012, -1.3521105, 0.22621834, 0.7168259,
                    -0.6709239, 0.79757756],
                   [-0.32457778, 1.2831115, 1.1211495, -0.02665559, 1.9170904, -1.3397789, 1.4124829, -1.4298155,
                    0.758519, -0.25322974],
                   [-0.24226122, -1.2555921, 0.6492511, -0.34847677, 0.19916506, 0.628554, -0.19658111, 0.44939864,
                    -0.11677749, -1.2131723],
                   [0.24267715, 0.28106326, 1.1075432, -0.29006946, 0.31335673, 0.8833154, 0.13152207, 1.5482179,
                    0.29770762, -0.16246222],
                   [0.02145994, 0.80424, -0.95061, 1.5875458, -0.00308682, 0.17964548, 0.49912593, 0.46977136,
                    0.2151897, 0.30908248]]).astype(np.float32)
    expect = np.array([[1.4219905, -0.39837134, 0.5452743, -0.09062839, -0.02375537, -1.5890603, 0.10658137, 0.6185817,
                        -0.7411523, 0.15054005],
                       [-0.94926417, 0.13830578, 0.7609547, -0.31733334, 1.8485254, -1.4657221, 1.2625053, -1.523396,
                        0.601499, -0.35607445],
                       [-0.14447737, -1.0622973, 0.80294746, -0.32016528, 0.33523226, 0.63443416, 0.23186903,
                        0.53539133, -0.0633494, -0.9495847],
                       [-0.36894822, 0.253609, -0.5127511, -0.33366728, -0.18740037, 0.19628316, -0.20430653, 1.1471655,
                        0.24743511, -0.23741922],
                       [-1.2582518, 0.57718843, -1.0812542, 1.4944922, -0.8770549, 0.1476463, 0.40500447, 0.23499368,
                        0.09027944, 0.26695627]]).astype(np.float32)

    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    net = LogSoftmax()
    dx = Grad(net)(Tensor(x), Tensor(dy))
    # Element-wise absolute error must stay within the 1e-5 tolerance.
    diff = np.abs(dx[0].asnumpy() - expect)
    err = np.ones(shape=expect.shape) * 1.0e-5
    assert np.all(diff < err)

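# A minimal NumPy cross-check for the backward pass, assuming the standard
# log-softmax vector-Jacobian product dx = dy - softmax(x) * sum(dy) along
# the reduction axis. The helper is illustrative, not part of the original suite.
def np_logsoftmax_grad(x, dy, axis=-1):
    softmax = np.exp(np_logsoftmax(x, axis))
    return dy - softmax * np.sum(dy, axis=axis, keepdims=True)
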
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_logsoftmaxgrad1():
    """Backward check: LogSoftmax gradient along axis 0 on CPU."""
    x = np.array([[-0.47705367, 0.48267725, -1.0453935, 1.574488, 0.20362134, 0.4435456, -0.23984082, -0.43684655,
                   -0.7725506, 1.4481013],
                  [1.1012247, 1.7069651, 0.55062026, 0.3361901, -1.1082426, -0.5001939, -0.3255393, -0.7972024,
                   -0.27965206, -0.702805],
                  [0.19450496, 0.87596166, 0.6467245, -1.044987, 0.5248943, -2.6166635, 1.6719198, 0.06600758,
                   -0.4099178, 1.1861311],
                  [1.1305193, -1.97308, 2.1047623, -1.5105937, 0.93052036, 1.2467804, 0.5310002, 0.7084912, -1.3681422,
                   -0.9686862],
                  [1.871408, 0.14219497, -0.41050452, -0.749807, 1.4900619, -1.8172716, -0.73839617, 0.17565694,
                   -0.4553867, -1.5423119]]).astype(np.float32)
    dy = np.array([[1.516363, -0.15196544, 0.598733, 0.64357865, 0.16265012, -1.3521105, 0.22621834, 0.7168259,
                    -0.6709239, 0.79757756],
                   [-0.32457778, 1.2831115, 1.1211495, -0.02665559, 1.9170904, -1.3397789, 1.4124829, -1.4298155,
                    0.758519, -0.25322974],
                   [-0.24226122, -1.2555921, 0.6492511, -0.34847677, 0.19916506, 0.628554, -0.19658111, 0.44939864,
                    -0.11677749, -1.2131723],
                   [0.24267715, 0.28106326, 1.1075432, -0.29006946, 0.31335673, 0.8833154, 0.13152207, 1.5482179,
                    0.29770762, -0.16246222],
                   [0.02145994, 0.80424, -0.95061, 1.5875458, -0.00308682, 0.17964548, 0.49912593, 0.46977136,
                    0.2151897, 0.30908248]]).astype(np.float32)
    expect = np.array([[1.464194, -0.29578894, 0.5296974, -0.39600563, -0.1479242, -1.0869746, 0.04521982, 0.5064515,
                        -0.7515615, 1.0554069],
                       [-0.5774203, 0.793861, 0.7805745, -0.32800734, 1.8334473, -1.236596, 1.2463496, -1.5765365,
                        0.6265108, -0.22322391],
                       [-0.34437084, -1.4687154, 0.27432096, -0.42420125, -0.22908019, 0.640983, -1.4210342, 0.10155854,
                        -0.23266247, -1.0147638],
                       [-0.01768187, 0.26872346, -0.5037259, -0.3376058, -0.3291146, 1.4752979, -0.25972134, 0.8869053,
                        0.25325722, -0.13946185],
                       [-0.5247209, 0.70192003, -1.0808672, 1.4858199, -1.1273282, 0.20728993, 0.38918605, 0.08162117,
                        0.10445589, 0.3220427]]).astype(np.float32)

    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    net = LogSoftmax(0)
    dx = Grad(net)(Tensor(x), Tensor(dy))
    # Element-wise absolute error must stay within the 1e-5 tolerance.
    diff = np.abs(dx[0].asnumpy() - expect)
    err = np.ones(shape=expect.shape) * 1.0e-5
    assert np.all(diff < err)
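
# Optional self-check (illustrative, not part of the original suite): probe
# the NumPy gradient helper against a central finite difference of
# f(x) = sum(log_softmax(x) * dy); the two should agree closely.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    xs = rng.standard_normal((3, 4))
    dys = rng.standard_normal((3, 4))
    v = rng.standard_normal((3, 4))  # random probe direction
    eps = 1e-4
    lhs = (np.sum(np_logsoftmax(xs + eps * v) * dys) -
           np.sum(np_logsoftmax(xs - eps * v) * dys)) / (2 * eps)
    rhs = np.sum(np_logsoftmax_grad(xs, dys) * v)
    assert abs(lhs - rhs) < 1e-6, (lhs, rhs)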