# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, device_target="CPU")


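# LeNet-5 style convolutional network. The max-pooling cell below is marked
# for recomputation, so this test also exercises the recompute pass: the
# activations of the marked cell are rebuilt during the backward pass
# instead of being stored from the forward pass.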
class LeNet(nn.Cell):
    def __init__(self):
        super(LeNet, self).__init__()
        self.relu = P.ReLU()
        self.batch_size = 32

        self.conv1 = nn.Conv2d(1, 6, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')
        self.conv2 = nn.Conv2d(6, 16, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        # Trade compute for memory: recompute this cell's activations in the
        # backward pass rather than keeping them alive.
        self.pool.recompute()
        self.reshape = P.Reshape()
        # Two conv+pool stages reduce a 1x32x32 input to 16x5x5 = 400 features.
        self.fc1 = nn.Dense(400, 120)
        self.fc2 = nn.Dense(120, 84)
        self.fc3 = nn.Dense(84, 10)

    def construct(self, input_x):
        output = self.conv1(input_x)
        output = self.relu(output)
        output = self.pool(output)
        output = self.conv2(output)
        output = self.relu(output)
        output = self.pool(output)
        output = self.reshape(output, (self.batch_size, -1))
        output = self.fc1(output)
        output = self.relu(output)
        output = self.fc2(output)
        output = self.relu(output)
        output = self.fc3(output)
        return output


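# Runs a single training step with Momentum SGD and checks that the
# first-step loss matches the analytic value for a near-uniform
# 10-class softmax.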
def train(net, data, label):
    learning_rate = 0.01
    momentum = 0.9

    optimizer = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), learning_rate, momentum)
    criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    net_with_criterion = WithLossCell(net, criterion)
    train_network = TrainOneStepCell(net_with_criterion, optimizer)  # one forward/backward/update per call
    train_network.set_train()
    res = train_network(data, label)
    print("+++++++++Loss+++++++++++++")
    print(res)
    print("+++++++++++++++++++++++++++")
    # With tiny constant inputs and no biases, the logits are near zero, so the
    # softmax is near-uniform and the loss should be ln(10) ~= 2.302585.
    # Compare the absolute difference; a one-sided check would also accept a
    # loss far below the expected value.
    diff = res.asnumpy() - 2.302585
    assert np.all(np.abs(diff) < 1.e-6)


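# Smoke test: one optimization step of LeNet on CPU in graph mode.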
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_lenet():
    # Constant, non-zero inputs: every sample is identical, so the initial
    # prediction is near-uniform and the expected loss is ln(10).
    data = Tensor(np.ones([32, 1, 32, 32]).astype(np.float32) * 0.01)
    label = Tensor(np.ones([32]).astype(np.int32))
    net = LeNet()
    train(net, data, label)