# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test rmsprop """
import numpy as np
import pytest

import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.common.api import _cell_graph_executor
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import RMSProp
from mindspore.ops import operations as P


class Net(nn.Cell):
    """ Net definition """

    def __init__(self):
        super(Net, self).__init__()
        self.weight = Parameter(Tensor(np.ones([64, 10]).astype(np.float32)), name="weight")
        self.bias = Parameter(Tensor(np.ones([10]).astype(np.float32)), name="bias")
        self.matmul = P.MatMul()
        self.biasAdd = P.BiasAdd()

    def construct(self, x):
        x = self.biasAdd(self.matmul(x, self.weight), self.bias)
        return x


def test_rmsprop_compile():
    """ test_rmsprop_compile """
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    net = Net()
    net.set_train()

    loss = nn.SoftmaxCrossEntropyWithLogits()
    optimizer = RMSProp(net.trainable_params(), learning_rate=0.1)

    net_with_loss = WithLossCell(net, loss)
    train_network = TrainOneStepCell(net_with_loss, optimizer)
    _cell_graph_executor.compile(train_network, inputs, label)


def test_rmsprop_e():
    """ test RMSProp argument validation: negative momentum and non-float momentum """
    net = Net()
    with pytest.raises(ValueError):
        RMSProp(net.get_parameters(), momentum=-0.1, learning_rate=0.1, weight_decay=0.9)

    with pytest.raises(TypeError):
        RMSProp(net.get_parameters(), momentum=1, learning_rate=0.1, weight_decay=0.9)
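

# The sketch below is an additional, illustrative compile test for the centered
# RMSProp variant, mirroring test_rmsprop_compile above. It assumes that
# mindspore.nn.optim.RMSProp accepts `decay` and `centered` keyword arguments;
# adjust or drop it if the optimizer's signature differs.
def test_rmsprop_centered_compile():
    """ test_rmsprop_centered_compile (hypothetical sketch) """
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    net = Net()
    net.set_train()

    loss = nn.SoftmaxCrossEntropyWithLogits()
    # centered=True normalizes by an estimate of the gradient variance rather
    # than the raw second moment; decay is the moving-average discount factor.
    optimizer = RMSProp(net.trainable_params(), learning_rate=0.1, decay=0.9, centered=True)

    net_with_loss = WithLossCell(net, loss)
    train_network = TrainOneStepCell(net_with_loss, optimizer)
    _cell_graph_executor.compile(train_network, inputs, label)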