# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for the Softplus operator on the GPU backend."""

import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, device_target="GPU")


class SoftplusNet(nn.Cell):
    """Minimal network that wraps the Softplus primitive for graph-mode execution."""

    def __init__(self):
        super(SoftplusNet, self).__init__()
        self.softplus = P.Softplus()

    def construct(self, x):
        return self.softplus(x)


def SoftplusCompute(x):
    """NumPy reference implementation: softplus(x) = log(1 + exp(x))."""
    return np.log(1 + np.exp(x))


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softplus_1d():
    """Softplus on a 1-D float32 input matches the NumPy reference."""
    x_np = np.random.random((50,)).astype(np.float32)
    y_np = SoftplusCompute(x_np)

    x_ms = Tensor(x_np)
    net = SoftplusNet()
    y_ms = net(x_ms)

    assert np.allclose(y_np, y_ms.asnumpy())


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softplus_2d():
    """Softplus on a 2-D float32 input matches the NumPy reference."""
    x_np = np.random.random((50, 40)).astype(np.float32)
    y_np = SoftplusCompute(x_np)

    x_ms = Tensor(x_np)
    net = SoftplusNet()
    y_ms = net(x_ms)

    assert np.allclose(y_np, y_ms.asnumpy())


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softplus_4d():
    """Softplus on a 4-D float32 input matches the NumPy reference."""
    x_np = np.random.random((32, 3, 224, 224)).astype(np.float32)
    y_np = SoftplusCompute(x_np)

    x_ms = Tensor(x_np)
    net = SoftplusNet()
    y_ms = net(x_ms)

    assert np.allclose(y_np, y_ms.asnumpy())


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softplus_neg():
    """Softplus on negative float32 inputs matches the NumPy reference."""
    x_np = np.random.random((32, 3, 224, 224)).astype(np.float32) * -1
    y_np = SoftplusCompute(x_np)

    x_ms = Tensor(x_np)
    net = SoftplusNet()
    y_ms = net(x_ms)

    assert np.allclose(y_np, y_ms.asnumpy())


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softplus_4d_fp16():
    """Softplus on a 4-D float16 input matches the NumPy reference within a looser tolerance."""
    x_np = np.random.random((32, 3, 224, 224)).astype(np.float16)
    y_np = SoftplusCompute(x_np)

    x_ms = Tensor(x_np)
    net = SoftplusNet()
    y_ms = net(x_ms)

    assert np.allclose(y_np, y_ms.asnumpy(), rtol=5e-3)
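

# The test below is a sketch, not part of the original suite: the tests above feed
# only np.random.random() values in [0, 1) (or their negatives), so Softplus is never
# exercised far from zero. This hypothetical test widens the input range and checks
# against np.logaddexp(0, x), a numerically stable NumPy form of log(1 + exp(x)) that
# avoids overflow for very large x. It assumes the GPU Softplus kernel stays accurate
# over this wider range, which the tests above do not verify.
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softplus_wide_range():
    """Sketch: 1-D float32 inputs spanning [-50, 50] against a stable reference."""
    x_np = np.linspace(-50.0, 50.0, 1000).astype(np.float32)
    y_np = np.logaddexp(0, x_np)  # stable softplus reference: log(exp(0) + exp(x))

    x_ms = Tensor(x_np)
    net = SoftplusNet()
    y_ms = net(x_ms)

    assert np.allclose(y_np, y_ms.asnumpy(), atol=1e-6)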