# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
import pytest

import mindspore.context as context
from .optimizer_utils import FakeNet, build_network, default_fc1_weight_rprop, default_fc1_bias_rprop, \
    no_default_fc1_weight_rprop, no_default_fc1_bias_rprop, default_group_fc1_weight_rprop, default_group_fc1_bias_rprop


def _run_rprop_case(mode, config, expected_weight, expected_bias, is_group=False):
    """Train a FakeNet under ``config`` and check fc1 weight/bias against expected arrays.

    Args:
        mode: mindspore execution mode (GRAPH_MODE or PYNATIVE_MODE).
        config (dict): optimizer configuration passed to ``build_network``.
        expected_weight: reference values for the fc1 weight parameter.
        expected_bias: reference values for the fc1 bias parameter.
        is_group (bool): whether to build the network with parameter grouping.
    """
    context.set_context(mode=mode)
    # Only forward is_group when requested so the call matches the
    # non-grouped signature used by the default/no-default cases.
    if is_group:
        _, cells = build_network(config, net=FakeNet(), is_group=True)
    else:
        _, cells = build_network(config, net=FakeNet())
    # cells.prev holds the optimizer's tracked parameters: [0] weight, [1] bias.
    assert np.allclose(cells.prev[0].asnumpy(), expected_weight, atol=1.e-2)
    assert np.allclose(cells.prev[1].asnumpy(), expected_bias, atol=1.e-2)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_default_rprop(mode):
    """
    Feature: Test Rprop optimizer
    Description: Test Rprop with default parameter
    Expectation: Loss values and parameters conform to preset values.
    """
    config = {'name': 'Rprop', 'lr': 0.01, 'etas': (0.5, 1.2), 'step_sizes': (1e-6, 50.), 'weight_decay': 0.0}
    _run_rprop_case(mode, config, default_fc1_weight_rprop, default_fc1_bias_rprop)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_no_default_rprop(mode):
    """
    Feature: Test Rprop optimizer
    Description: Test Rprop with another set of parameter
    Expectation: Loss values and parameters conform to preset values.
    """
    config = {'name': 'Rprop', 'lr': 0.01, 'etas': (0.6, 1.9), 'step_sizes': (1e-3, 20.), 'weight_decay': 0.0}
    _run_rprop_case(mode, config, no_default_fc1_weight_rprop, no_default_fc1_bias_rprop)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_default_rprop_group(mode):
    """
    Feature: Test Rprop optimizer
    Description: Test Rprop with parameter grouping
    Expectation: Loss values and parameters conform to preset values.
    """
    config = {'name': 'Rprop', 'lr': 0.1, 'etas': (0.6, 1.9), 'step_sizes': (1e-2, 10.), 'weight_decay': 0.0}
    _run_rprop_case(mode, config, default_group_fc1_weight_rprop, default_group_fc1_bias_rprop, is_group=True)