# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P


class NetElu(nn.Cell):
    """Minimal network wrapping the Elu primitive so it can run in graph mode."""
    def __init__(self):
        super(NetElu, self).__init__()
        self.elu = P.Elu()

    def construct(self, x):
        return self.elu(x)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_elu_fp16():
    """Check the Elu CPU kernel against hand-computed values in float16."""
    x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]).astype(np.float16))
    expect = np.array([[-0.632, 4.0, -0.999], [2.0, -0.993, 9.0]]).astype(np.float16)
    # The expected values are rounded to three decimals, so compare absolute
    # differences against a matching 1e-3 tolerance rather than a one-sided 1e-6 bound.
    error = np.ones(shape=[2, 3]) * 1.0e-3

    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    elu = NetElu()
    output = elu(x)
    diff = np.abs(output.asnumpy() - expect)
    assert np.all(diff < error)

@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_elu_fp32():
    """Check the Elu CPU kernel against hand-computed values in float32."""
    x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]).astype(np.float32))
    expect = np.array([[-0.632, 4.0, -0.999], [2.0, -0.993, 9.0]]).astype(np.float32)
    # As above, the reference values carry only three decimals, so use an
    # absolute tolerance of 1e-3.
    error = np.ones(shape=[2, 3]) * 1.0e-3

    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    elu = NetElu()
    output = elu(x)
    diff = np.abs(output.asnumpy() - expect)
    assert np.all(diff < error)
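

# A minimal reference sketch (not part of the original test file): ELU computes
# x for x >= 0 and alpha * (exp(x) - 1) for x < 0, and P.Elu() here uses the
# default alpha = 1.0, so the expected arrays above follow directly, e.g.
# elu(-1.0) = exp(-1) - 1 ≈ -0.632. The helper name below is hypothetical.
def _elu_reference(x, alpha=1.0):
    return np.where(x >= 0, x, alpha * (np.exp(x) - 1.0))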