# Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.ops.operations import _inner_ops as inner

25
class NetReLU6(nn.Cell):
    """Static-shape network that applies the ReLU6 activation to its input."""

    def __init__(self):
        super().__init__()
        self.act = P.ReLU6()

    def construct(self, x):
        # Clamp every element of x into [0, 6] via the ReLU6 primitive.
        return self.act(x)
33
34
class NetRelu6Dynamic(nn.Cell):
    """Applies ReLU6 after routing the input through the dynamic-shape converter."""

    def __init__(self):
        super().__init__()
        self.to_dynamic = inner.GpuConvertToDynamicShape()
        self.act = P.ReLU6()

    def construct(self, x):
        # Force the GPU kernel down the dynamic-shape code path first.
        dyn_x = self.to_dynamic(x)
        return self.act(dyn_x)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_relu6():
    """ReLU6 forward on GPU produces identical results in PyNative and Graph mode."""
    x = Tensor(np.array([[[[-1, 1, 10],
                           [5.9, 6.1, 6],
                           [10, 1, -1]]]]).astype(np.float32))
    expect = np.array([[[[0, 1, 6],
                         [5.9, 6, 6],
                         [6, 1, 0]]]]).astype(np.float32)

    # Exercise both execution modes with a freshly built network each time,
    # in the same order as before: PyNative first, then Graph.
    for mode in (context.PYNATIVE_MODE, context.GRAPH_MODE):
        context.set_context(mode=mode, device_target="GPU")
        net = NetReLU6()
        output = net(x)
        assert (output.asnumpy() == expect).all()


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_relu6_dynamic():
    """ReLU6 through the GPU dynamic-shape path handles inputs of different ranks."""
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    net = NetRelu6Dynamic()

    # (input, expected) pairs: a 2-D tensor followed by a 4-D tensor, fed to
    # the SAME network instance — the dynamic-shape conversion allows this.
    cases = (
        (np.array([[-1.0, 4.0, -8.0],
                   [2.0, -5.0, 9.0]], dtype=np.float32),
         np.array([[0, 4, 0],
                   [2, 0, 6]], dtype=np.float32)),
        (np.array([[[[-1, 1, 10],
                     [5.9, 6.1, 6],
                     [10, 1, -1]]]], dtype=np.float32),
         np.array([[[[0, 1, 6],
                     [5.9, 6, 6],
                     [6, 1, 0]]]], dtype=np.float32)),
    )
    for raw, expect in cases:
        output = net(Tensor(raw))
        assert (output.asnumpy() == expect).all()
90