# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test FTRL """
import pytest
import numpy as np

import mindspore.nn as nn
from mindspore import Tensor, Parameter, context
from mindspore.common.api import _cell_graph_executor
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import FTRL
from mindspore.ops import operations as P


@pytest.fixture(scope="module", autouse=True)
def setup_teardown():
    """Enable sparse mode for every test in this module, restoring the
    default (disabled) when the module finishes."""
    context.set_context(enable_sparse=True)
    yield
    context.set_context(enable_sparse=False)


class Net(nn.Cell):
    """Dense network: MatMul on a [64, 10] weight followed by BiasAdd."""

    def __init__(self):
        super(Net, self).__init__()
        self.weight = Parameter(Tensor(np.ones([64, 10]).astype(np.float32)), name='weight')
        self.bias = Parameter(Tensor(np.ones([10]).astype(np.float32)), name='bias')
        self.matmul = P.MatMul()
        self.biasAdd = P.BiasAdd()

    def construct(self, x):
        x = self.biasAdd(self.matmul(x, self.weight), self.bias)
        return x


class NetWithSparseGatherV2(nn.Cell):
    """ NetWithSparseGatherV2 definition """

    def __init__(self):
        super(NetWithSparseGatherV2, self).__init__()
        self.weight1 = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="weight1")
        # Fixed: the original wrote astype((np.float32)) with redundant
        # parentheses, inconsistent with every other astype call here.
        self.weight2 = Parameter(Tensor(np.ones([2, 1, 2]).astype(np.float32)), name="weight2")
        self.axis = 0
        self.gather = P.SparseGatherV2()

    def construct(self, indices, label):
        # `label` is unused by the forward pass; it is accepted so this
        # cell matches the (data, label) calling convention expected by
        # TrainOneStepCell when compiled with two inputs.
        return self.gather(self.weight1, indices, self.axis) + self.weight2


def test_ftrl():
    """ test_ftrl """
    # Compile a dense network with the FTRL optimizer (weight decay and
    # loss scaling enabled) to verify graph construction succeeds.
    inputs = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    net = Net()
    net.set_train()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    optimizer = FTRL(net.trainable_params(), weight_decay=0.9, loss_scale=2.0)
    net_with_loss = WithLossCell(net, loss)
    train_network = TrainOneStepCell(net_with_loss, optimizer)
    _cell_graph_executor.compile(train_network, inputs, label)


def test_spares_ftrl_compile():
    """ test sparse ftrl compile """
    # Sparse-gradient path with the optimizer update targeted at CPU.
    indices = Tensor(np.array([0, 1]).astype(np.int32))
    label = Tensor(np.zeros([2, 1, 2]).astype(np.float32))
    net = NetWithSparseGatherV2()
    net.set_train()

    optimizer = FTRL(net.trainable_params(), weight_decay=0.9, loss_scale=2.0)
    optimizer.target = 'CPU'
    train_network = TrainOneStepCell(net, optimizer)
    _cell_graph_executor.compile(train_network, indices, label)


def test_spares_ftrl():
    """ test sparse ftrl"""
    # Same sparse-gradient path, but with the update targeted at Ascend.
    indices = Tensor(np.array([0, 1]).astype(np.int32))
    label = Tensor(np.zeros([2, 1, 2]).astype(np.float32))
    net = NetWithSparseGatherV2()
    net.set_train()

    optimizer = FTRL(net.trainable_params(), weight_decay=0.9, loss_scale=2.0)
    optimizer.target = 'Ascend'
    train_network = TrainOneStepCell(net, optimizer)
    _cell_graph_executor.compile(train_network, indices, label)