# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test_training """
import numpy as np

import mindspore.nn as nn
from mindspore import context
from mindspore.common.tensor import Tensor
from mindspore.nn import WithGradCell, WithLossCell
from mindspore.ops import operations as P
from ..ut_filter import non_graph_engine


def setup_module(module):
    # Run all tests in this module in PyNative mode.
    context.set_context(mode=context.PYNATIVE_MODE)


class LeNet5(nn.Cell):
    """ LeNet5 definition """

    def __init__(self):
        super(LeNet5, self).__init__()
        self.conv1 = nn.Conv2d(1, 6, 5, pad_mode='valid')
        self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')
        self.fc1 = nn.Dense(16 * 5 * 5, 120)
        self.fc2 = nn.Dense(120, 84)
        self.fc3 = nn.Dense(84, 10)
        self.relu = nn.ReLU()
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = P.Flatten()

    def construct(self, x):
        x = self.max_pool2d(self.relu(self.conv1(x)))
        x = self.max_pool2d(self.relu(self.conv2(x)))
        x = self.flatten(x)
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        x = self.fc3(x)
        return x


@non_graph_engine
def test_loss_cell_wrapper():
    """ test_loss_cell_wrapper """
    data = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32) * 0.01)
    label = Tensor(np.ones([1, 10]).astype(np.float32))
    net = LeNet5()
    loss_fn = nn.SoftmaxCrossEntropyWithLogits()
    loss_net = WithLossCell(net, loss_fn)
    loss_out = loss_net(data, label)
    assert loss_out.asnumpy().dtype in ('float32', 'float64')


@non_graph_engine
def test_grad_cell_wrapper():
    """ test_grad_cell_wrapper """
    data = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32) * 0.01)
    label = Tensor(np.ones([1, 10]).astype(np.float32))
    # Sensitivity (initial gradient) passed to WithGradCell as its sens argument.
    dout = Tensor(np.ones([1]).astype(np.float32))
    net = LeNet5()
    loss_fn = nn.SoftmaxCrossEntropyWithLogits()
    grad_net = WithGradCell(net, loss_fn, dout)
    gradients = grad_net(data, label)
    # Sample one scalar from the first gradient tensor and check its dtype.
    assert isinstance(gradients[0].asnumpy()[0][0][0][0], (np.float32, np.float64))
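

# A minimal sketch, not part of the original tests: it chains the same
# LeNet5/WithLossCell setup through an optimizer to exercise one fused
# forward/backward/update step. It assumes the standard nn.Momentum and
# nn.TrainOneStepCell APIs; the hyperparameters below are illustrative
# placeholders, not values taken from the original file.
@non_graph_engine
def test_train_one_step_wrapper():
    """ test_train_one_step_wrapper """
    data = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32) * 0.01)
    label = Tensor(np.ones([1, 10]).astype(np.float32))
    net = LeNet5()
    loss_fn = nn.SoftmaxCrossEntropyWithLogits()
    loss_net = WithLossCell(net, loss_fn)
    optimizer = nn.Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
    train_net = nn.TrainOneStepCell(loss_net, optimizer)
    train_net.set_train()
    # TrainOneStepCell returns the loss computed before the parameter update.
    loss_out = train_net(data, label)
    assert loss_out.asnumpy().dtype in ('float32', 'float64')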