# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
15""" test optimizer """
16import numpy as np
17import pytest
18
19from mindspore import Tensor
20from mindspore.common.parameter import Parameter
21from mindspore.nn.optim import Optimizer, SGD, Adam, AdamWeightDecay
22
23
24class IterableObjc:
25    def __iter__(self):
26        cont = 0
27        while cont < 3:
28            cont += 1
29            yield Parameter(Tensor(cont), name="cont")
30
31
32params = IterableObjc()
33
34
class TestOptimizer():
    """ TestOptimizer definition """

    def test_init(self):
        Optimizer(0.5, params)
        with pytest.raises(ValueError):
            Optimizer(-0.5, params)

    def test_construct(self):
        opt_2 = Optimizer(0.5, params)
        with pytest.raises(NotImplementedError):
            opt_2.construct()


class TestAdam():
    """ TestAdam definition """

    def test_init(self):
        Adam(params, learning_rate=1e-3, beta1=0.9, beta2=0.999, eps=1e-8, use_locking=False,
             use_nesterov=False, weight_decay=0.0, loss_scale=1.0)

    def test_construct(self):
        with pytest.raises(RuntimeError):
            gradient = Tensor(np.zeros([1, 2, 3]))
            adam = Adam(params, learning_rate=1e-3, beta1=0.9, beta2=0.999, eps=1e-8, use_locking=False,
                        use_nesterov=False, weight_decay=0.0, loss_scale=1.0)
            adam(gradient)


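# A minimal positive-path init check for AdamWeightDecay, mirroring TestAdam.test_init;
# it assumes AdamWeightDecay accepts the learning_rate, beta1, beta2, eps and
# weight_decay keyword arguments shown here.
class TestAdamWeightDecay():
    """ TestAdamWeightDecay definition """

    def test_init(self):
        AdamWeightDecay(params, learning_rate=1e-3, beta1=0.9, beta2=0.999, eps=1e-6,
                        weight_decay=0.0)

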
class TestSGD():
    """ TestSGD definition """

    def test_init(self):
        with pytest.raises(ValueError):
            SGD(params, learning_rate=0.1, momentum=-0.1, dampening=0, weight_decay=0, nesterov=False)
        with pytest.raises(ValueError):
            SGD(params, learning_rate=0.12, momentum=-0.1, dampening=0, weight_decay=0, nesterov=False)
        SGD(params)


class TestNullParam():
    """ TestNullParam definition """

    def test_optim_init(self):
        with pytest.raises(ValueError):
            Optimizer(0.1, None)

    def test_AdamWeightDecay_init(self):
        with pytest.raises(ValueError):
            AdamWeightDecay(None)

    def test_Sgd_init(self):
        with pytest.raises(ValueError):
            SGD(None)


class TestUnsupportParam():
    """ TestUnsupportParam definition """

    def test_optim_init(self):
        with pytest.raises(TypeError):
            Optimizer(0.1, (1, 2, 3))

    def test_AdamWeightDecay_init(self):
        with pytest.raises(TypeError):
            AdamWeightDecay(9)

    def test_Sgd_init(self):
        with pytest.raises(TypeError):
            params_tensor = Parameter(Tensor(np.zeros([1, 2, 3])), "x")
            SGD(params_tensor)
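

# A minimal sketch of negative learning-rate checks, assuming Adam and SGD route
# learning_rate through the same validation that makes Optimizer(-0.5, params)
# raise ValueError in TestOptimizer.test_init above.
class TestNegativeLearningRate():
    """ TestNegativeLearningRate definition """

    def test_adam_init(self):
        with pytest.raises(ValueError):
            Adam(params, learning_rate=-1e-3)

    def test_sgd_init(self):
        with pytest.raises(ValueError):
            SGD(params, learning_rate=-0.1)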