# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
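"""Test that the ReLU operator produces the same result with graph kernel fusion enabled and disabled on GPU and Ascend backends."""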
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P


class Net(nn.Cell):
    """Simple network that applies the ReLU operator to its input."""

    def __init__(self):
        super(Net, self).__init__()
        self.relu = P.ReLU()

    def construct(self, x):
        return self.relu(x)


def get_output(x, enable_graph_kernel=False):
    """Run the network with graph kernel fusion enabled or disabled and return its output."""
    context.set_context(enable_graph_kernel=enable_graph_kernel)
    net = Net()
    output = net(x)
    return output


def run_relu(shape, dtype):
    """Check that ReLU gives the same result with and without graph kernel fusion."""
    x = Tensor(np.random.normal(0, 10, shape).astype(dtype))
    expect = get_output(x, False)
    output = get_output(x, True)

    expect_np = expect.asnumpy().copy()
    output_np = output.asnumpy().copy()

    assert np.allclose(expect_np, output_np, 0.0001, 0.0001)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_relu_gpu():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    run_relu((4, 3), np.int32)
    run_relu((12, 1), np.float16)


@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_relu_ascend():
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    run_relu((4, 3), np.int32)
    run_relu((12, 1), np.float16)