# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np

import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore import context
from mindspore.common import dtype as mstype
from mindspore.common.api import _cell_graph_executor
from mindspore.nn.loss.loss import LossBase
from mindspore.ops import composite as C
from mindspore.ops import operations as P
from tests.ut.python.ops.test_math_ops import VirtualLoss


grad_all = C.GradOperation(get_all=True)


class NetWithLoss(nn.Cell):
    """Wrap a network with a virtual loss so the graph ends in a scalar output."""

    def __init__(self, network):
        super(NetWithLoss, self).__init__()
        self.loss = VirtualLoss()
        self.network = network

    def construct(self, x, y):
        predict = self.network(x, y)
        return self.loss(predict)


class GradWrap(nn.Cell):
    """Wrap a network so calling it returns gradients w.r.t. all inputs."""

    def __init__(self, network):
        super(GradWrap, self).__init__()
        self.network = network

    def construct(self, x, y):
        return grad_all(self.network)(x, y)


class CustomMatMul(nn.Cell):
    """Thin Cell wrapper around P.MatMul with configurable transpose flags."""

    def __init__(self, transpose_a=False, transpose_b=False):
        super(CustomMatMul, self).__init__()
        self.fc = P.MatMul(transpose_a=transpose_a, transpose_b=transpose_b)

    def construct(self, x1, x2):
        out = self.fc(x1, x2)
        return out


class MarginCE(LossBase):
    """Toy margin-loss graph: a diamond of MatMuls over two frozen parameters.

    Used only to exercise auto-parallel strategy search; the numerical
    result is irrelevant to the test.
    """

    def __init__(self):
        super(MarginCE, self).__init__()
        self.fc = CustomMatMul(transpose_b=True)
        self.fc1 = CustomMatMul(transpose_b=True)
        self.fc2 = CustomMatMul(transpose_b=True)
        self.fc3 = CustomMatMul(transpose_b=True)
        self.fc4 = CustomMatMul(transpose_b=True)
        self.param = Parameter(Tensor(np.ones([512, 512]), dtype=mstype.float32), name="param", requires_grad=False)
        # Fix: this Parameter was also named "param"; Parameter names must be
        # unique within a cell or checkpoint save/load and compilation can fail.
        self.param2 = Parameter(Tensor(np.ones([512, 512]), dtype=mstype.float32), name="param2",
                                requires_grad=False)

    def construct(self, feature, label):
        fc_out = self.fc(feature, label)

        fc1_out = self.fc1(self.param2, self.param)
        fc2_out = self.fc2(fc1_out, fc_out)
        fc3_out = self.fc3(fc1_out, fc_out)
        fc4_out = self.fc4(fc2_out, fc3_out)
        return fc4_out


def test_marin_loss():
    """Compile MarginCE under auto_parallel to verify strategy search succeeds.

    NOTE(review): "marin" looks like a typo for "margin", but the name is the
    pytest-collected entry point, so it is kept for compatibility.
    """
    context.set_auto_parallel_context(device_num=4, global_rank=0)

    x = Tensor(np.ones([512, 512]), dtype=ms.float32)
    y = Tensor(np.ones([512, 512]), dtype=ms.float32)

    net = GradWrap(NetWithLoss(MarginCE()))
    context.set_auto_parallel_context(parallel_mode="auto_parallel")
    net.set_auto_parallel()
    net.set_train()
    _cell_graph_executor.compile(net, x, y)