• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2020 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15""" test Activations """
16import numpy as np
17
18import mindspore.nn as nn
19from mindspore import Tensor
20from mindspore.common.api import _cell_graph_executor
21from ..ut_filter import non_graph_engine
22
23
class SoftmaxNet(nn.Cell):
    """Thin wrapper cell that applies nn.Softmax along a fixed axis."""

    def __init__(self, dim):
        """Build the cell; `dim` is the axis the softmax normalizes over."""
        super().__init__()
        self.softmax = nn.Softmax(dim)

    def construct(self, x):
        """Return softmax of `x` along the configured axis."""
        return self.softmax(x)
31
32
@non_graph_engine
def test_compile():
    """Softmax over axis 0 runs on a small 2x2 float32 input."""
    net = SoftmaxNet(0)
    data = np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32)
    net(Tensor(data))
38
39
@non_graph_engine
def test_compile_axis():
    """Softmax over the last axis runs on scaled random 4-D input."""
    net = SoftmaxNet(-1)
    scale = 355  # large magnitude exercises softmax numerical range
    samples = (np.random.randn(4, 16, 1, 1) * scale).astype(np.float32)
    net(Tensor(samples))
47
48
class LogSoftmaxNet(nn.Cell):
    """Thin wrapper cell that applies nn.LogSoftmax along a fixed axis."""

    def __init__(self, dim):
        """Build the cell; `dim` is the axis log-softmax normalizes over."""
        super().__init__()
        self.logsoftmax = nn.LogSoftmax(dim)

    def construct(self, x):
        """Return log-softmax of `x` along the configured axis."""
        return self.logsoftmax(x)
56
57
@non_graph_engine
def test_compile_logsoftmax():
    """LogSoftmax over axis 0 runs on a small 2x2 float32 input."""
    net = LogSoftmaxNet(0)
    data = np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32)
    net(Tensor(data))
63
64
class Net1(nn.Cell):
    """Thin wrapper cell around nn.ReLU."""

    def __init__(self):
        super().__init__()
        self.relu = nn.ReLU()

    def construct(self, x):
        """Return element-wise ReLU of `x`."""
        return self.relu(x)
72
73
def test_compile_relu():
    """ReLU cell compiles through the cell graph executor."""
    data = np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32)
    _cell_graph_executor.compile(Net1(), Tensor(data))
78
79
class Net_gelu(nn.Cell):
    """Thin wrapper cell around nn.GELU."""

    def __init__(self):
        super().__init__()
        self.gelu = nn.GELU()

    def construct(self, x):
        """Return element-wise GELU of `x`."""
        return self.gelu(x)
87
88
def test_compile_gelu():
    """GELU cell compiles through the cell graph executor."""
    data = np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32)
    _cell_graph_executor.compile(Net_gelu(), Tensor(data))
93
94
class NetLeakyReLU(nn.Cell):
    """Thin wrapper cell around nn.LeakyReLU with a configurable slope."""

    def __init__(self, alpha):
        """Build the cell; `alpha` is the negative-slope coefficient."""
        super().__init__()
        self.leaky_relu = nn.LeakyReLU(alpha)

    def construct(self, x):
        """Return element-wise leaky ReLU of `x`."""
        return self.leaky_relu(x)
102
103
def test_compile_leaky_relu():
    """LeakyReLU cell (slope 0.1) compiles; input mixes signs and zeros."""
    data = np.array([[1.6, 0, 0.6], [6, 0, -6]], dtype=np.float32)
    _cell_graph_executor.compile(NetLeakyReLU(alpha=0.1), Tensor(data))
108