1# Copyright 2020 Huawei Technologies Co., Ltd 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14# ============================================================================ 15import numpy as np 16 17import mindspore.nn as nn 18from mindspore import Tensor, context 19from mindspore.nn import Momentum 20from mindspore.nn import WithLossCell, TrainOneStepCell 21from mindspore.ops import operations as P 22from mindspore.parallel._cost_model_context import set_cost_model_context 23 24 25class Net(nn.Cell): 26 def __init__(self, input_ch, out_ch): 27 super(Net, self).__init__() 28 self.dense = nn.Dense(input_ch, out_ch) 29 self.relu = P.ReLU() 30 31 def construct(self, x): 32 x = self.dense(x) 33 x = self.relu(x) 34 return x 35 36 37def test_inference_phase(): 38 context.set_auto_parallel_context(device_num=8, global_rank=0) 39 context.set_auto_parallel_context(parallel_mode="auto_parallel") 40 set_cost_model_context(run_phase=1) 41 42 net = Net(512, 128) 43 predict = Tensor(np.ones([64, 512]).astype(np.float32) * 0.001) 44 label = Tensor(np.ones([64, 128]).astype(np.float32)) 45 46 loss = nn.SoftmaxCrossEntropyWithLogits() 47 optimizer = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) 48 net_with_loss = WithLossCell(net, loss) 49 train_network = TrainOneStepCell(net_with_loss, optimizer) 50 train_network.set_train() 51 train_network.set_auto_parallel() 52 53 _ = train_network(predict, label) 54