# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Compile-level unit tests for ``mindspore.nn.ParameterUpdate``.

Each test builds a small train graph, then constructs a ``ParameterUpdate``
cell around one of the network's parameters and checks that the update graph
compiles for the given input dtype (tensor learning rate, int32 global step,
scalar float32 learning rate). No numeric results are executed or checked.
"""
import numpy as np
import pytest

import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.common import dtype as mstype
from mindspore.common.api import _cell_graph_executor
from mindspore.nn import TrainOneStepCell, WithLossCell, ParameterUpdate
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P


class Net(nn.Cell):
    """Minimal dense layer (MatMul + BiasAdd) used as the training target."""

    def __init__(self):
        super(Net, self).__init__()
        # 64 -> 10 fully-connected layer, weights initialized to ones.
        self.weight = Parameter(Tensor(np.ones([64, 10]).astype(np.float32)), name="weight")
        self.bias = Parameter(Tensor(np.ones([10]).astype(np.float32)), name="bias")
        self.matmul = P.MatMul()
        self.biasAdd = P.BiasAdd()

    def construct(self, x):
        x = self.biasAdd(self.matmul(x, self.weight), self.bias)
        return x


def test_parameter_update_int32_and_tensor():
    """ParameterUpdate compiles for a tensor lr and an int32 global step."""
    net = Net()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    # Dynamic learning rate: a tensor of per-step values rather than a scalar.
    optimizer = Momentum(net.get_parameters(), Tensor(np.array([0.1, 0.01, 0.001]), mstype.float32), 0.001)

    net_with_loss = WithLossCell(net, loss)
    train_network = TrainOneStepCell(net_with_loss, optimizer)

    # compile train graph
    train_network.set_train()
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    _cell_graph_executor.compile(train_network, inputs, label)

    # test tensor: replace the learning-rate parameter with a new tensor value
    param_lr = train_network.parameters_dict()['learning_rate']
    update_network = ParameterUpdate(param_lr)
    update_network.phase = 'update_param'

    input_lr = Tensor(np.array([0.2, 0.02, 0.002]), mstype.float32)
    _cell_graph_executor.compile(update_network, input_lr)

    # test int32: the optimizer's global step is an int32 parameter
    param_step = train_network.parameters_dict()['global_step']
    update_global_step = ParameterUpdate(param_step)

    input_step = Tensor(np.array([1000]), mstype.int32)
    _cell_graph_executor.compile(update_global_step, input_step)


def test_parameter_update_float32():
    """ParameterUpdate compiles for a scalar float32 learning rate."""
    net = Net()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    optimizer = Momentum(net.get_parameters(), 0.01, 0.001)

    net_with_loss = WithLossCell(net, loss)
    train_network = TrainOneStepCell(net_with_loss, optimizer)

    # compile train graph
    train_network.set_train()
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    _cell_graph_executor.compile(train_network, inputs, label)

    # construct and compile update graph
    param_lr = train_network.parameters_dict()['learning_rate']
    update_network = ParameterUpdate(param_lr)
    update_network.phase = 'update_param'

    input_lr = Tensor(0.0001, mstype.float32)
    _cell_graph_executor.compile(update_network, input_lr)


def test_parameter_update_error():
    """ParameterUpdate raises TypeError when given a non-Parameter argument."""
    input_np = np.array([1])

    with pytest.raises(TypeError):
        ParameterUpdate(input_np)