# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
15""" Test Dynamic Learning Rate """
16import pytest
17
18from mindspore import Tensor
19from mindspore.nn import learning_rate_schedule as lr_schedules
20from mindspore.common.api import _cell_graph_executor
21import mindspore.common.dtype as mstype
22
23
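# Shared hyperparameter values reused by the schedule tests below.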
learning_rate = 0.1
end_learning_rate = 0.01
decay_rate = 0.9
decay_steps = 4
warmup_steps = 2
min_lr = 0.01
max_lr = 0.1
power = 0.5
global_step = Tensor(2, mstype.int32)


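# Constructor validation: each schedule should raise TypeError for mistyped
# arguments and ValueError for out-of-range values.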
class TestInit:
    def test_learning_rate_type(self):
        lr = True
        with pytest.raises(TypeError):
            lr_schedules.ExponentialDecayLR(lr, decay_rate, decay_steps)

        with pytest.raises(TypeError):
            lr_schedules.PolynomialDecayLR(lr, end_learning_rate, decay_steps, power)

    def test_learning_rate_value(self):
        lr = -1.0
        with pytest.raises(ValueError):
            lr_schedules.ExponentialDecayLR(lr, decay_rate, decay_steps)

        with pytest.raises(ValueError):
            lr_schedules.PolynomialDecayLR(lr, end_learning_rate, decay_steps, power)

    def test_end_learning_rate_type(self):
        lr = True
        with pytest.raises(TypeError):
            lr_schedules.PolynomialDecayLR(learning_rate, lr, decay_steps, power)

    def test_end_learning_rate_value(self):
        lr = -1.0
        with pytest.raises(ValueError):
            lr_schedules.PolynomialDecayLR(learning_rate, lr, decay_steps, power)

    def test_decay_rate_type(self):
        rate = 'a'
        with pytest.raises(TypeError):
            lr_schedules.ExponentialDecayLR(learning_rate, rate, decay_steps)

    def test_decay_rate_value(self):
        rate = -1.0
        with pytest.raises(ValueError):
            lr_schedules.ExponentialDecayLR(learning_rate, rate, decay_steps)

    def test_decay_steps_type(self):
        decay_steps_e = 'm'
        with pytest.raises(TypeError):
            lr_schedules.ExponentialDecayLR(learning_rate, decay_rate, decay_steps_e)

        with pytest.raises(TypeError):
            lr_schedules.CosineDecayLR(min_lr, max_lr, decay_steps_e)

        with pytest.raises(TypeError):
            lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps_e, power)

    def test_decay_steps_value(self):
        decay_steps_e = -2
        with pytest.raises(ValueError):
            lr_schedules.ExponentialDecayLR(learning_rate, decay_rate, decay_steps_e)

        with pytest.raises(ValueError):
            lr_schedules.CosineDecayLR(min_lr, max_lr, decay_steps_e)

        with pytest.raises(ValueError):
            lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps_e, power)

    def test_is_stair(self):
        is_stair = 1
        with pytest.raises(TypeError):
            lr_schedules.ExponentialDecayLR(learning_rate, decay_rate, decay_steps, is_stair)

    def test_min_lr_type(self):
        min_lr1 = True
        with pytest.raises(TypeError):
            lr_schedules.CosineDecayLR(min_lr1, max_lr, decay_steps)

    def test_min_lr_value(self):
        min_lr1 = -1.0
        with pytest.raises(ValueError):
            lr_schedules.CosineDecayLR(min_lr1, max_lr, decay_steps)

    def test_max_lr_type(self):
        max_lr1 = 'a'
        with pytest.raises(TypeError):
            lr_schedules.CosineDecayLR(min_lr, max_lr1, decay_steps)

    def test_max_lr_value(self):
        max_lr1 = -1.0
        with pytest.raises(ValueError):
            lr_schedules.CosineDecayLR(min_lr, max_lr1, decay_steps)

    def test_power(self):
        power1 = True
        with pytest.raises(TypeError):
            lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power1)


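# Graph-mode smoke tests: each schedule below should compile against the
# Tensor global_step input.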
def test_exponential_decay():
    lr_schedule = lr_schedules.ExponentialDecayLR(learning_rate, decay_rate, decay_steps, True)
    _cell_graph_executor.compile(lr_schedule, global_step)


def test_natural_exp_decay():
    lr_schedule = lr_schedules.NaturalExpDecayLR(learning_rate, decay_rate, decay_steps, True)
    _cell_graph_executor.compile(lr_schedule, global_step)


def test_inverse_decay():
    lr_schedule = lr_schedules.InverseDecayLR(learning_rate, decay_rate, decay_steps, True)
    _cell_graph_executor.compile(lr_schedule, global_step)


def test_cosine_decay():
    lr_schedule = lr_schedules.CosineDecayLR(min_lr, max_lr, decay_steps)
    _cell_graph_executor.compile(lr_schedule, global_step)


def test_polynomial_decay():
    lr_schedule = lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power)
    _cell_graph_executor.compile(lr_schedule, global_step)


def test_polynomial_decay2():
    lr_schedule = lr_schedules.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power, True)
    _cell_graph_executor.compile(lr_schedule, global_step)


def test_warmup():
    lr_schedule = lr_schedules.WarmUpLR(learning_rate, warmup_steps)
    _cell_graph_executor.compile(lr_schedule, global_step)