1# Copyright 2019 Huawei Technologies Co., Ltd 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14# ============================================================================ 15 16import numpy as np 17import pytest 18 19import mindspore.context as context 20import mindspore.nn as nn 21from mindspore import Tensor 22from mindspore.common.initializer import initializer 23from mindspore.common.parameter import Parameter 24from mindspore.ops.operations import _grad_ops as G 25 26context.set_context(mode=context.GRAPH_MODE, device_target='CPU') 27 28 29class NetReluGrad(nn.Cell): 30 def __init__(self): 31 super(NetReluGrad, self).__init__() 32 self.relu_grad = G.ReluGrad() 33 self.x = Parameter(initializer(Tensor(np.array([[[[-1, 1, 1], 34 [1, -1, 1], 35 [1, 1, -1]]]]).astype(np.float32)), [1, 1, 3, 3]), name='x') 36 self.dy = Parameter(initializer(Tensor(np.array([[[[1, 0, 1], 37 [0, 1, 0], 38 [1, 1, 1]]]]).astype(np.float32)), [1, 1, 3, 3]), name='dy') 39 40 def construct(self): 41 return self.relu_grad(self.dy, self.x) 42 43 44@pytest.mark.level0 45@pytest.mark.platform_x86_cpu 46@pytest.mark.env_onecard 47def test_relu_grad(): 48 relu_grad = NetReluGrad() 49 output = relu_grad() 50 expect = np.array([[[[0, 0, 1], [0, 0, 0], [1, 1, 0]]]]).astype(np.float32) 51 error = np.ones(shape=[3, 3]) * 1.0e-6 52 diff = np.abs(output.asnumpy() - expect) 53 assert np.all(diff < error) 54