# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
@File : test_data_parallel_dense.py
@Desc : test data parallel dense
"""
import numpy as np

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import _cell_graph_executor
from mindspore.nn import Momentum
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.ops import operations as P
from mindspore.context import ParallelMode


class DenseMMNet(nn.Cell):
    """Attention-like toy network.

    Three parallel Dense projections feed two chained MatMuls,
    followed by a final Dense output layer.
    """

    def __init__(self):
        super(DenseMMNet, self).__init__()
        # Three parallel 128 -> 768 projections (query/key/value style).
        self.fc1 = nn.Dense(128, 768, activation='relu')
        self.fc2 = nn.Dense(128, 768, activation='relu')
        self.fc3 = nn.Dense(128, 768, activation='relu')
        # Output projection, 768 -> 768.
        self.fc4 = nn.Dense(768, 768, activation='relu')
        self.relu4 = nn.ReLU()
        self.relu5 = nn.ReLU()
        self.transpose = P.Transpose()
        self.matmul1 = P.MatMul()
        self.matmul2 = P.MatMul()

    def construct(self, x):
        # Project the input three ways; each result is (batch, 768).
        query = self.fc1(x)
        key = self.fc2(x)
        value = self.fc3(x)
        # (batch, 768) -> (768, batch) so query @ key yields (batch, batch).
        key = self.transpose(key, (1, 0))
        scores = self.relu4(self.matmul1(query, key))
        # (batch, batch) @ (batch, 768) -> (batch, 768).
        out = self.relu5(self.matmul2(scores, value))
        return self.fc4(out)


def test_data_parallel_dense():
    """Compile DenseMMNet for training under 8-device data-parallel mode."""
    context.set_context(mode=context.GRAPH_MODE)
    context.reset_auto_parallel_context()
    context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
                                      gradients_mean=True,
                                      device_num=8)

    features = Tensor(np.ones([32, 128]).astype(np.float32) * 0.01)
    labels = Tensor(np.zeros([32, 768]).astype(np.float32))

    network = DenseMMNet()
    criterion = nn.SoftmaxCrossEntropyWithLogits()
    trainable = filter(lambda p: p.requires_grad, network.get_parameters())
    optimizer = Momentum(trainable, learning_rate=0.1, momentum=0.9)

    train_net = TrainOneStepCell(WithLossCell(network, criterion), optimizer)

    # Graph compilation only; this test does not execute the network.
    _cell_graph_executor.compile(train_net, features, labels)
    context.reset_auto_parallel_context()