• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import pytest
from mindspore.nn import Cell
from mindspore import context, Tensor, Parameter
import mindspore.ops.operations as P
import mindspore as ms
import numpy as np
21
# Default to compiled graph mode on Ascend; test_auto_monad_addn_adam later
# switches this process to PYNATIVE_MODE for its reference run.
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
23
class AutoMonadAddnAdamNet(Cell):
    """Net mixing a parameter read (AddN) with an in-place Adam update.

    ``construct`` reads var/m/v before calling Adam, so auto-monad must
    sequence the AddN ahead of the side-effecting update.
    """

    def __init__(self, var, m, v):
        super().__init__()
        self.apply_adam = P.Adam()
        self.var = Parameter(var, name="var")
        self.m = Parameter(m, name="m")
        self.v = Parameter(v, name="v")
        self.addn = P.AddN()
        self.mul = P.Mul()

    def construct(self, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad):
        # Read the parameters first, then apply the in-place Adam update.
        summed = self.addn((self.var, self.m, self.v))
        self.apply_adam(self.var, self.m, self.v, beta1_power, beta2_power,
                        lr, beta1, beta2, epsilon, grad)
        return summed, self.var, self.m, self.v
38
39
40def _count_unequal_element(data_expected, data_me, rtol, atol):
41    assert data_expected.shape == data_me.shape
42    total_count = len(data_expected.flatten())
43    error = np.abs(data_expected - data_me)
44    greater = np.greater(error, atol + np.abs(data_me) * rtol)
45    loss_count = np.count_nonzero(greater)
46    assert (loss_count / total_count) < rtol, \
47        "\ndata_expected_std:{0}\ndata_me_error:{1}\nloss:{2}". \
48            format(data_expected[greater], data_me[greater], error[greater])
49
50
def allclose_nparray(data_expected, data_me, rtol, atol, equal_nan=True):
    """Compare two arrays with a tolerant fallback.

    When ``data_expected`` contains NaN, the arrays must be allclose
    outright. Otherwise, arrays that fail allclose are passed to
    ``_count_unequal_element``, which tolerates a small fraction of
    mismatching elements.
    """
    has_nan = np.any(np.isnan(data_expected))
    close = np.allclose(data_expected, data_me, rtol, atol, equal_nan=equal_nan)
    if has_nan:
        assert close
    elif not close:
        _count_unequal_element(data_expected, data_me, rtol, atol)
58
59
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_auto_monad_addn_adam():
    """Graph mode and PyNative mode must agree for AutoMonadAddnAdamNet.

    Runs the net once in GRAPH_MODE (set at module import), then rebuilds
    the net from the same initial tensors, switches the process to
    PYNATIVE_MODE, reruns, and compares all four outputs within tolerance.
    """
    var = Tensor(np.random.rand(3, 3, 3).astype(np.float32))
    m = Tensor(np.random.rand(3, 3, 3).astype(np.float32))
    v = Tensor(np.random.rand(3, 3, 3).astype(np.float32))
    net = AutoMonadAddnAdamNet(var, m, v)
    beta1_power = Tensor(0.9, ms.float32)
    beta2_power = Tensor(0.999, ms.float32)
    lr = Tensor(0.1, ms.float32)
    beta1 = Tensor(0.9, ms.float32)
    beta2 = Tensor(0.999, ms.float32)
    epsilon = Tensor(1e-8, ms.float32)
    grad = Tensor(np.random.rand(3, 3, 3).astype(np.float32))
    # Graph-mode run (module-level context is GRAPH_MODE).
    out, new_var, new_m, new_v = net(beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad)
    # Fresh net so the PyNative run starts from the same initial var/m/v.
    net = AutoMonadAddnAdamNet(var, m, v)
    context.set_context(mode=context.PYNATIVE_MODE)
    out_pyn, new_var_pyn, new_m_pyn, new_v_pyn = net(beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad)
    # Both execution modes must produce the same AddN result and the same
    # updated parameters.
    allclose_nparray(out_pyn.asnumpy(), out.asnumpy(), 0.001, 0.001)
    allclose_nparray(new_var_pyn.asnumpy(), new_var.asnumpy(), 0.001, 0.001)
    allclose_nparray(new_m_pyn.asnumpy(), new_m.asnumpy(), 0.001, 0.001)
    allclose_nparray(new_v_pyn.asnumpy(), new_v.asnumpy(), 0.001, 0.001)
84
85
class AutoMonadTwoAssignTwoAddnDependencyNet(Cell):
    """Interleaves two Assign side effects with two AddN reads.

    Auto-monad must keep each AddN ordered after the Assign written just
    before it, so every read observes the freshly assigned value.
    """

    def __init__(self):
        super().__init__()
        self.parameter1 = ms.Parameter(Tensor([1.0], ms.float32), name="parameter1")
        self.parameter2 = ms.Parameter(Tensor([3.0], ms.float32), name="parameter2")
        self.assign = P.Assign()
        self.addN = P.AddN()

    def construct(self, inputs):
        # Write parameter1, then read it together with parameter2.
        self.assign(self.parameter1, inputs)
        acc = self.addN((inputs, self.parameter1, self.parameter2))
        # Write parameter2, then read both parameters again.
        self.assign(self.parameter2, inputs)
        acc = self.addN((acc, self.parameter1, self.parameter2))
        return acc
100
101
class AutoMonadTwoAssignTwoAddnDependencyBenchmarkNet(Cell):
    """Side-effect-free reference for the assign/addn dependency net.

    Computes the same value the dependency net produces, substituting
    ``inputs`` wherever that net reads a parameter it has just assigned.
    """

    def __init__(self):
        super().__init__()
        self.parameter2 = ms.Parameter(Tensor([3.0], ms.float32), name="parameter2")
        self.addN = P.AddN()

    def construct(self, inputs):
        # parameter1 equals inputs after the dependency net's first assign.
        total = self.addN((inputs, inputs, self.parameter2))
        # Both parameters equal inputs after its second assign.
        total = self.addN((total, inputs, inputs))
        return total
112
113
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_auto_monad_read_dependency_two_assign_two_addn():
    """The assign/addn dependency net must match its side-effect-free
    benchmark, proving auto-monad sequenced each read after its write."""
    dependency_net = AutoMonadTwoAssignTwoAddnDependencyNet()
    reference_net = AutoMonadTwoAssignTwoAddnDependencyBenchmarkNet()
    actual = dependency_net(Tensor([9.0], ms.float32))
    expected = reference_net(Tensor([9.0], ms.float32))
    allclose_nparray(actual.asnumpy(), expected.asnumpy(), 0.001, 0.001)
124