# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
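"""Compare BatchNorm forward results computed with and without graph kernel fusion."""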
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore.common.tensor import Tensor
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P


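# Cell wrapping the BatchNorm primitive, with scale/bias and the moving statistics
# held as Parameters so their updated values can be read back after a forward pass.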
class Net(nn.Cell):
    def __init__(self, input_scale, input_bias, input_mean, input_variance, is_training):
        super(Net, self).__init__()
        self.fused_bn_ex = P.BatchNorm(is_training=is_training, epsilon=1e-5, momentum=0.9)
        self.scale = Parameter(input_scale, name='scale')
        self.bias = Parameter(input_bias, name='b')
        self.mean = Parameter(input_mean, name='mean')
        self.variance = Parameter(input_variance, name='variance')

    def construct(self, input_x):
        return self.fused_bn_ex(input_x, self.scale, self.bias, self.mean, self.variance)


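# Build the network and run one forward pass with graph kernel fusion enabled or disabled,
# returning the BatchNorm outputs together with the network's moving mean and variance parameters.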
def get_output(x, weight, bias, moving_mean, moving_var, is_training, enable_graph_kernel=False):
    context.set_context(enable_graph_kernel=enable_graph_kernel)
    net = Net(Tensor(weight), Tensor(bias), Tensor(moving_mean), Tensor(moving_var), is_training)
    output = net(Tensor(x))
    return output, net.mean, net.variance


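# Training mode: the fused and unfused runs should agree on the normalized output,
# two of the BatchNorm auxiliary outputs, and the updated moving mean/variance parameters.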
def test_bn_train():
    x = np.random.normal(0, 1, [1, 2, 4, 4]).astype(np.float32)
    weight = np.random.normal(0, 1, [2,]).astype(np.float32)
    bias = np.random.normal(0, 1, [2,]).astype(np.float32)
    moving_mean = np.random.normal(0, 1, [2,]).astype(np.float32)
    moving_var = np.random.normal(0, 1, [2,]).astype(np.float32)

    train_expect = get_output(x, weight, bias, moving_mean, moving_var, True, False)
    train_output = get_output(x, weight, bias, moving_mean, moving_var, True, True)

    assert np.allclose(train_expect[0][0].asnumpy(), train_output[0][0].asnumpy(), 0.0001, 0.0001)
    assert np.allclose(train_expect[0][3].asnumpy(), train_output[0][3].asnumpy(), 0.0001, 0.0001)
    assert np.allclose(train_expect[0][4].asnumpy(), train_output[0][4].asnumpy(), 0.0001, 0.0001)
    assert np.allclose(train_expect[1].data.asnumpy(), train_output[1].data.asnumpy(), 0.0001, 0.0001)
    assert np.allclose(train_expect[2].data.asnumpy(), train_output[2].data.asnumpy(), 0.0001, 0.0001)

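# Inference mode: only the normalized output is compared between the fused and unfused runs.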
def test_bn_infer():
    x = np.random.normal(5, 1, [1, 2, 4, 4]).astype(np.float32)
    weight = np.random.normal(5, 1, [2,]).astype(np.float32)
    bias = np.random.normal(5, 1, [2,]).astype(np.float32)
    moving_mean = np.random.normal(5, 1, [2,]).astype(np.float32)
    moving_var = np.random.normal(5, 1, [2,]).astype(np.float32)

    infer_expect = get_output(x, weight, bias, moving_mean, moving_var, False, False)
    infer_output = get_output(x, weight, bias, moving_mean, moving_var, False, True)

    assert np.allclose(infer_expect[0][0].asnumpy(), infer_output[0][0].asnumpy(), 0.0001, 0.0001)

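# GPU entry points: run the checks above in graph mode with the GPU backend.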
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_bn_train_gpu():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    test_bn_train()

@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_bn_infer_gpu():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    test_bn_infer()